Posted to commits@hive.apache.org by xu...@apache.org on 2015/09/25 03:40:05 UTC

[27/50] [abbrv] hive git commit: HIVE-11783: Extending HPL/SQL parser (Dmitry Tolpeko reviewed by Alan Gates)

HIVE-11783: Extending HPL/SQL parser (Dmitry Tolpeko reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/06790789
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/06790789
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/06790789

Branch: refs/heads/beeline-cli
Commit: 06790789bddb35f60706071d8d3682d434fa05dd
Parents: d51c62a
Author: Dmitry Tolpeko <dm...@gmail.com>
Authored: Tue Sep 22 06:38:06 2015 -0700
Committer: Dmitry Tolpeko <dm...@gmail.com>
Committed: Tue Sep 22 06:38:06 2015 -0700

----------------------------------------------------------------------
 .../antlr4/org/apache/hive/hplsql/Hplsql.g4     |  70 ++--
 .../main/java/org/apache/hive/hplsql/Cmp.java   | 314 ++++++++++++++++++
 .../java/org/apache/hive/hplsql/Column.java     |  29 +-
 .../main/java/org/apache/hive/hplsql/Conn.java  |  21 ++
 .../main/java/org/apache/hive/hplsql/Copy.java  |  50 ++-
 .../main/java/org/apache/hive/hplsql/Exec.java  |  66 +++-
 .../java/org/apache/hive/hplsql/Expression.java |  33 +-
 .../main/java/org/apache/hive/hplsql/File.java  |  18 +-
 .../main/java/org/apache/hive/hplsql/Meta.java  |  28 +-
 .../main/java/org/apache/hive/hplsql/Query.java |  18 ++
 .../java/org/apache/hive/hplsql/Select.java     |  23 +-
 .../main/java/org/apache/hive/hplsql/Stmt.java  |   8 +-
 .../main/java/org/apache/hive/hplsql/Var.java   | 110 ++++++-
 .../apache/hive/hplsql/functions/Function.java  |   6 +-
 .../hive/hplsql/functions/FunctionMisc.java     | 121 +++++++
 .../org/apache/hive/hplsql/TestHplsqlLocal.java |  18 ++
 .../apache/hive/hplsql/TestHplsqlOffline.java   |   5 +
 hplsql/src/test/queries/db/cmp_row_count.sql    |   4 +
 hplsql/src/test/queries/db/cmp_sum.sql          |   3 +
 hplsql/src/test/queries/db/copy_to_file.sql     |   2 +
 hplsql/src/test/queries/db/copy_to_hdfs.sql     |   2 +
 hplsql/src/test/queries/db/copy_to_table.sql    |   2 +
 hplsql/src/test/queries/db/part_count.sql       |  17 +
 hplsql/src/test/queries/db/part_count_by.sql    |   4 +
 hplsql/src/test/queries/db/schema.sql           |  32 ++
 hplsql/src/test/queries/db/select_into.sql      |  20 +-
 hplsql/src/test/queries/db/select_into2.sql     |  17 +
 .../test/queries/local/create_procedure2.sql    |  16 +
 hplsql/src/test/queries/local/if2.sql           |   5 +
 hplsql/src/test/queries/local/include.sql       |   2 +
 hplsql/src/test/queries/local/include_file.sql  |   1 +
 hplsql/src/test/queries/local/mult_div.sql      |   8 +
 hplsql/src/test/queries/offline/select_db2.sql  |   5 +
 .../src/test/results/db/cmp_row_count.out.txt   |  12 +
 hplsql/src/test/results/db/cmp_sum.out.txt      | 320 +++++++++++++++++++
 hplsql/src/test/results/db/copy_to_file.out.txt |   6 +
 hplsql/src/test/results/db/copy_to_hdfs.out.txt |   4 +
 .../src/test/results/db/copy_to_table.out.txt   |   2 +
 hplsql/src/test/results/db/part_count.out.txt   |  15 +
 .../src/test/results/db/part_count_by.out.txt   |  13 +
 hplsql/src/test/results/db/select_into.out.txt  |  58 +++-
 hplsql/src/test/results/db/select_into2.out.txt |  19 ++
 .../results/local/create_procedure2.out.txt     |  10 +
 hplsql/src/test/results/local/if2.out.txt       |   4 +
 hplsql/src/test/results/local/include.out.txt   |   8 +
 hplsql/src/test/results/local/mult_div.out.txt  |   7 +
 .../src/test/results/offline/select_db2.out.txt |   6 +
 47 files changed, 1471 insertions(+), 91 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
----------------------------------------------------------------------
diff --git a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4 b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
index ff772fe..bbe7276 100644
--- a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
+++ b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
@@ -43,6 +43,7 @@ stmt :
      | break_stmt
      | call_stmt
      | close_stmt
+     | cmp_stmt
      | copy_from_local_stmt
      | copy_stmt
      | commit_stmt
@@ -183,7 +184,7 @@ declare_handler_item :     // Condition handler declaration
      ;
      
 declare_temporary_table_item :     // DECLARE TEMPORARY TABLE statement
-       T_GLOBAL? T_TEMPORARY T_TABLE ident T_OPEN_P create_table_columns T_CLOSE_P create_table_options?
+       T_GLOBAL? T_TEMPORARY T_TABLE ident (T_AS? T_OPEN_P select_stmt T_CLOSE_P | T_AS? select_stmt | T_OPEN_P create_table_columns T_CLOSE_P) create_table_options?
      ;
      
 create_table_stmt :
@@ -252,6 +253,7 @@ create_table_options_db2_item :
      | T_DISTRIBUTE T_BY T_HASH T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P
      | T_LOGGED 
      | T_NOT T_LOGGED
+     | T_DEFINITION T_ONLY
      ;
      
 create_table_options_hive_item :
@@ -283,6 +285,7 @@ dtype :                  // Data types
      | T_DATETIME
      | T_DEC
      | T_DECIMAL
+     | T_DOUBLE T_PRECISION?
      | T_FLOAT
      | T_INT
      | T_INTEGER
@@ -377,7 +380,7 @@ else_block :
      ;
      
 include_stmt :          // INCLUDE statement
-       T_INCLUDE file_name
+       T_INCLUDE (file_name | expr)
      ;  
      
 insert_stmt :           // INSERT statement
@@ -418,7 +421,7 @@ get_diag_stmt_rowcount_item :
      ;
      
 grant_stmt :            
-       T_GRANT grant_stmt_item (T_COMMA grant_stmt_item)* T_TO ident
+       T_GRANT grant_stmt_item (T_COMMA grant_stmt_item)* T_TO T_ROLE ident
      ;
      
 grant_stmt_item :
@@ -445,12 +448,20 @@ close_stmt :            // CLOSE cursor statement
        T_CLOSE L_ID
      ;
      
+cmp_stmt :              // CMP statement
+       T_CMP (T_ROW_COUNT | T_SUM) cmp_source T_COMMA cmp_source
+     ;
+     
+cmp_source :
+      (table_name where_clause? | T_OPEN_P select_stmt T_CLOSE_P) (T_AT ident)?
+     ;
+     
 copy_from_local_stmt :  // COPY FROM LOCAL statement
        T_COPY T_FROM T_LOCAL copy_source (T_COMMA copy_source)* T_TO copy_target copy_file_option*
      ;
      
 copy_stmt :             // COPY statement
-       T_COPY (table_name | T_OPEN_P select_stmt T_CLOSE_P) T_TO copy_target copy_option*
+       T_COPY (table_name | T_OPEN_P select_stmt T_CLOSE_P) T_TO T_HDFS? copy_target copy_option*
      ;
      
 copy_source :
@@ -458,7 +469,7 @@ copy_source :
      ;
 
 copy_target :
-       (ident | expr | L_FILE)
+       (file_name | expr) 
      ;
     
 copy_option :
@@ -615,7 +626,7 @@ select_list_item :
      ;
      
 select_list_alias :
-       {!_input.LT(1).getText().equalsIgnoreCase("FROM")}? T_AS? ident
+       {!_input.LT(1).getText().equalsIgnoreCase("INTO") && !_input.LT(1).getText().equalsIgnoreCase("FROM")}? T_AS? ident
      | T_OPEN_P T_TITLE L_S_STRING T_CLOSE_P
      ;
      
@@ -642,7 +653,7 @@ from_table_name_clause :
      ;     
 
 from_subselect_clause :
-       T_OPEN_P subselect_stmt T_CLOSE_P from_alias_clause?
+       T_OPEN_P select_stmt T_CLOSE_P from_alias_clause?
      ;
      
 from_join_clause :
@@ -669,7 +680,8 @@ from_alias_clause :
         !_input.LT(1).getText().equalsIgnoreCase("EXECUTE") && 
         !_input.LT(1).getText().equalsIgnoreCase("GROUP") &&
         !_input.LT(1).getText().equalsIgnoreCase("ORDER") &&
-        !_input.LT(1).getText().equalsIgnoreCase("LIMIT")}?
+        !_input.LT(1).getText().equalsIgnoreCase("LIMIT") &&
+        !_input.LT(1).getText().equalsIgnoreCase("WITH")}?
        T_AS? ident (T_OPEN_P L_ID (T_COMMA L_ID)* T_CLOSE_P)? 
      ;
      
@@ -699,7 +711,7 @@ select_options :
 
 select_options_item :
        T_LIMIT expr
-     | T_WITH (T_RR | T_RS | T_CS | T_UR)
+     | T_WITH (T_RR | T_RS | T_CS | T_UR) (T_USE T_AND T_KEEP (T_EXCLUSIVE | T_UPDATE | T_SHARE) T_LOCKS)?
      ;
 
 update_stmt :                              // UPDATE statement
@@ -738,7 +750,7 @@ delete_stmt :                             // DELETE statement
      ;
      
 bool_expr :                               // Boolean condition
-       T_OPEN_P bool_expr T_CLOSE_P 
+       T_NOT? T_OPEN_P bool_expr T_CLOSE_P 
      | bool_expr bool_expr_logical_operator bool_expr 
      | bool_expr_atom
      ;
@@ -900,6 +912,7 @@ expr_spec_func :
      | T_MIN_PART_INT T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P 
      | T_MAX_PART_DATE T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P 
      | T_MIN_PART_DATE T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P 
+     | T_PART_COUNT T_OPEN_P expr (T_COMMA expr T_EQUAL expr)* T_CLOSE_P 
      | T_PART_LOC T_OPEN_P expr (T_COMMA expr T_EQUAL expr)+ (T_COMMA expr)? T_CLOSE_P 
      | T_TRIM T_OPEN_P expr T_CLOSE_P
      | T_SUBSTRING T_OPEN_P expr T_FROM expr (T_FOR expr)? T_CLOSE_P
@@ -946,7 +959,7 @@ host_stmt :
      ;
      
 file_name :
-       L_ID | L_FILE
+       L_FILE | '/'? ident ('/' ident)*
      ;
      
 date_literal :                             // DATE 'YYYY-MM-DD' literal
@@ -1012,6 +1025,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_CLIENT     
      | T_CLOSE 
      | T_CLUSTERED
+     | T_CMP
      | T_COLLECTION     
      | T_COPY
      | T_COMMIT
@@ -1043,6 +1057,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_DEFERRED
      | T_DEFINED
      | T_DEFINER
+     | T_DEFINITION
      | T_DELETE
      | T_DELIMITED
      | T_DELIMITER
@@ -1051,7 +1066,8 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_DIAGNOSTICS
      | T_DISTINCT 
      | T_DISTRIBUTE
-     | T_DO         
+     | T_DO        
+     | T_DOUBLE     
      | T_DROP    
      | T_DYNAMIC      
      // T_ELSE reserved word         
@@ -1062,7 +1078,8 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_EXCEPT       
      | T_EXEC         
      | T_EXECUTE      
-     | T_EXCEPTION    
+     | T_EXCEPTION  
+     | T_EXCLUSIVE     
      | T_EXISTS
      | T_EXIT         
      | T_FETCH  
@@ -1085,6 +1102,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_HANDLER      
      | T_HASH
      | T_HAVING       
+     | T_HDFS
      | T_HIVE         
      | T_HOST    
      | T_IDENTITY     
@@ -1106,7 +1124,8 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_ITEMS     
      | T_IS    
      | T_ISOPEN
-     | T_JOIN     
+     | T_JOIN  
+     | T_KEEP     
      | T_KEY
      | T_KEYS
      | T_LAG
@@ -1121,6 +1140,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_LOCAL     
      | T_LOCATOR
      | T_LOCATORS
+     | T_LOCKS
      | T_LOGGED    
      | T_LOGGING     
      | T_LOOP    
@@ -1157,10 +1177,12 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_OVER
      | T_OVERWRITE
      | T_OWNER
+     | T_PART_COUNT
      | T_PART_LOC 
      | T_PARTITION  
      | T_PCTFREE
-     | T_PCTUSED     
+     | T_PCTUSED  
+     | T_PRECISION     
      | T_PRESERVE
      | T_PRIMARY
      | T_PRINT 
@@ -1181,7 +1203,8 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_REVERSE    
      | T_RIGHT
      | T_RLIKE
-     | T_RS     
+     | T_RS 
+     | T_ROLE     
      | T_ROLLBACK
      | T_ROW
      | T_ROWS
@@ -1194,6 +1217,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_SELECT       
      | T_SET 
      | T_SETS     
+     | T_SHARE
      | T_SIGNAL
      | T_SMALLDATETIME
      | T_SMALLINT     
@@ -1277,6 +1301,7 @@ T_CHARACTER       : C H A R A C T E R ;
 T_CLIENT          : C L I E N T ;
 T_CLOSE           : C L O S E ;
 T_CLUSTERED       : C L U S T E R E D;
+T_CMP             : C M P ; 
 T_COLLECTION      : C O L L E C T I O N ; 
 T_COPY            : C O P Y ;
 T_COMMIT          : C O M M I T ; 
@@ -1304,6 +1329,7 @@ T_DEFAULT         : D E F A U L T ;
 T_DEFERRED        : D E F E R R E D ; 
 T_DEFINED         : D E F I N E D ; 
 T_DEFINER         : D E F I N E R ;
+T_DEFINITION      : D E F I N I T I O N ; 
 T_DELETE          : D E L E T E ;
 T_DELIMITED       : D E L I M I T E D ; 
 T_DELIMITER       : D E L I M I T E R ; 
@@ -1312,6 +1338,7 @@ T_DIAGNOSTICS     : D I A G N O S T I C S ;
 T_DISTINCT        : D I S T I N C T ;
 T_DISTRIBUTE      : D I S T R I B U T E ;
 T_DO              : D O ;
+T_DOUBLE          : D O U B L E ;
 T_DROP            : D R O P ;
 T_DYNAMIC         : D Y N A M I C ; 
 T_ELSE            : E L S E ;
@@ -1323,6 +1350,7 @@ T_EXCEPT          : E X C E P T ;
 T_EXEC            : E X E C ;
 T_EXECUTE         : E X E C U T E ;
 T_EXCEPTION       : E X C E P T I O N ;
+T_EXCLUSIVE       : E X C L U S I V E ; 
 T_EXISTS          : E X I S T S ; 
 T_EXIT            : E X I T ;
 T_FETCH           : F E T C H ;
@@ -1344,6 +1372,7 @@ T_GROUP           : G R O U P ;
 T_HANDLER         : H A N D L E R ;
 T_HASH            : H A S H ;
 T_HAVING          : H A V I N G ;
+T_HDFS            : H D F S ; 
 T_HIVE            : H I V E ;
 T_HOST            : H O S T ;
 T_IDENTITY        : I D E N T I T Y ; 
@@ -1366,6 +1395,7 @@ T_IS              : I S ;
 T_ISOPEN          : I S O P E N ;
 T_ITEMS           : I T E M S ; 
 T_JOIN            : J O I N ;
+T_KEEP            : K E E P; 
 T_KEY             : K E Y ;
 T_KEYS            : K E Y S ;
 T_LANGUAGE        : L A N G U A G E ;
@@ -1377,6 +1407,7 @@ T_LINES           : L I N E S ;
 T_LOCAL           : L O C A L ;
 T_LOCATOR         : L O C A T O R ; 
 T_LOCATORS        : L O C A T O R S ; 
+T_LOCKS           : L O C K S ; 
 T_LOGGED          : L O G G E D ; 
 T_LOGGING         : L O G G I N G ; 
 T_LOOP            : L O O P ;
@@ -1416,6 +1447,7 @@ T_OWNER           : O W N E R ;
 T_PARTITION       : P A R T I T I O N ; 
 T_PCTFREE         : P C T F R E E ; 
 T_PCTUSED         : P C T U S E D ;
+T_PRECISION       : P R E C I S I O N ; 
 T_PRESERVE        : P R E S E R V E ; 
 T_PRIMARY         : P R I M A R Y ;
 T_PRINT           : P R I N T ; 
@@ -1434,6 +1466,7 @@ T_RETURNS         : R E T U R N S ;
 T_REVERSE         : R E V E R S E ;
 T_RIGHT           : R I G H T ;
 T_RLIKE           : R L I K E ;
+T_ROLE            : R O L E ;
 T_ROLLBACK        : R O L L B A C K ;
 T_ROW             : R O W ; 
 T_ROWS            : R O W S ; 
@@ -1449,6 +1482,7 @@ T_SEL             : S E L ;
 T_SELECT          : S E L E C T ; 
 T_SET             : S E T ;
 T_SETS            : S E T S;
+T_SHARE           : S H A R E ; 
 T_SIGNAL          : S I G N A L ;
 T_SMALLDATETIME   : S M A L L D A T E T I M E ;
 T_SMALLINT        : S M A L L I N T ;
@@ -1513,6 +1547,7 @@ T_MAX_PART_INT         : M A X '_' P A R T '_' I N T ;
 T_MIN_PART_INT         : M I N '_' P A R T '_' I N T ;
 T_MAX_PART_DATE        : M A X '_' P A R T '_' D A T E ;
 T_MIN_PART_DATE        : M I N '_' P A R T '_' D A T E ;
+T_PART_COUNT           : P A R T '_' C O U N T ; 
 T_PART_LOC             : P A R T '_' L O C ;
 T_RANK                 : R A N K ;
 T_ROW_NUMBER           : R O W '_' N U M B E R;
@@ -1566,8 +1601,7 @@ L_WS        : L_BLANK+ -> skip ;                                       // Whites
 L_M_COMMENT : '/*' .*? '*/' -> channel(HIDDEN) ;                       // Multiline comment
 L_S_COMMENT : ('--' | '//')  .*? '\r'? '\n' -> channel(HIDDEN) ;       // Single line comment
 
-L_FILE      : '/'? L_ID ('/' L_ID)*                                    // File path
-            | ([a-zA-Z] ':' '\\'?)? L_ID ('\\' L_ID)*
+L_FILE      : ([a-zA-Z] ':' '\\'?)? L_ID ('\\' L_ID)*                  // File path (an a/b/c Linux-style path conflicts with the division operator and is handled at the parser level)
             ; 
 
 L_LABEL     : ([a-zA-Z] | L_DIGIT | '_')* ':'            
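
The grammar changes above add the CMP statement, COPY ... TO HDFS, the PART_COUNT
function, DOUBLE PRECISION, NOT before parenthesized boolean expressions, and
parser-level handling of Linux-style file paths. As a quick illustration of the kind
of source the extended parser is meant to accept, here is a sketch that feeds a few
made-up statements through the ANTLR-generated classes. It assumes HplsqlLexer and
HplsqlParser are the classes generated from Hplsql.g4 (HplsqlParser is referenced
throughout this commit) and that the grammar's entry rule is program(); the table,
path, and connection names are invented.

  import org.antlr.v4.runtime.ANTLRInputStream;
  import org.antlr.v4.runtime.CommonTokenStream;
  import org.apache.hive.hplsql.HplsqlLexer;
  import org.apache.hive.hplsql.HplsqlParser;

  public class ParseCheck {
    public static void main(String[] args) {
      // Statements exercising the new cmp_stmt, COPY ... TO HDFS and PART_COUNT rules
      String src =
          "CMP SUM orders, orders_backup AT db2conn;\n" +
          "COPY (SELECT * FROM orders) TO HDFS /tmp/orders_export;\n" +
          "PRINT PART_COUNT(orders, region = 'US');\n";
      HplsqlLexer lexer = new HplsqlLexer(new ANTLRInputStream(src));
      HplsqlParser parser = new HplsqlParser(new CommonTokenStream(lexer));
      parser.program();                                   // assumed entry rule
      System.out.println("Syntax errors: " + parser.getNumberOfSyntaxErrors());
    }
  }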

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java b/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java
new file mode 100644
index 0000000..ee65a88
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java
@@ -0,0 +1,314 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.util.ArrayList;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class Cmp implements Runnable {
+  
+  Exec exec;
+  Timer timer = new Timer();  
+  boolean trace = false;
+  boolean info = false;
+  
+  Query query;
+  String conn;
+  HplsqlParser.Cmp_stmtContext ctx;
+  
+  int tests = 0;
+  int failedTests = 0;
+  int failedTestsHighDiff = 0;
+  
+  Cmp(Exec e) {
+    exec = e;  
+    trace = exec.getTrace();
+    info = exec.getInfo();
+  }
+  
+  Cmp(Exec e, HplsqlParser.Cmp_stmtContext c, Query q, String cn) {
+    exec = e;  
+    trace = exec.getTrace();
+    info = exec.getInfo();
+    ctx = c;
+    query = q;
+    conn = cn;
+  }
+  
+  /**
+   * Run CMP command
+   */
+  Integer run(HplsqlParser.Cmp_stmtContext ctx) {
+    trace(ctx, "CMP");
+    this.ctx = ctx;
+    timer.start();
+    StringBuilder conn1 = new StringBuilder();
+    StringBuilder conn2 = new StringBuilder();
+    Query query1 = new Query();
+    Query query2 = new Query();
+    Boolean equal = null;
+    try {
+      String sql1 = getSql(ctx, conn1, 0);
+      String sql2 = getSql(ctx, conn2, 1);
+      if (trace) {
+        trace(ctx, "Query 1: " + sql1);
+        trace(ctx, "Query 2: " + sql2);
+      }
+      query1.setSql(sql1);
+      query2.setSql(sql2);
+      Cmp cmp1 = new Cmp(exec, ctx, query1, conn1.toString());
+      Cmp cmp2 = new Cmp(exec, ctx, query2, conn2.toString());
+      Thread t1 = new Thread(cmp1);
+      Thread t2 = new Thread(cmp2);
+      t1.start();
+      t2.start();
+      t1.join();
+      t2.join();
+      equal = compare(query1, query2);      
+    }
+    catch(Exception e) {
+      exec.signal(e);
+      return -1;
+    }
+    finally {
+      long elapsed = timer.stop();
+      if (info) {
+        String message = "CMP ";
+        if (equal != null) {
+          if (equal) {
+            message += "Equal, " + tests + " tests";
+          }
+          else {
+            message += "Not Equal, " + failedTests + " of " + tests + " tests failed";
+            message += ", " + failedTestsHighDiff + " failed tests with more than 0.01% difference";
+          }
+        }
+        else {
+          message += "Failed";
+        }
+        info(ctx, message + ", " + timer.format());
+      }
+      exec.closeQuery(query1, conn1.toString());
+      exec.closeQuery(query2, conn2.toString());
+    }
+    return 0;
+  }
+  
+  /**
+   * Get data for comparison from the source
+   */
+  public void run() {
+    exec.executeQuery(ctx, query, conn);
+  }
+  
+  /**
+   * Compare the results
+   */
+  Boolean compare(Query query1, Query query2) {
+    if (query1.error()) { 
+      exec.signal(query1);
+      return null;
+    }
+    else if (query2.error()) { 
+      exec.signal(query2);
+      return null;
+    }
+    ResultSet rs1 = query1.getResultSet();
+    ResultSet rs2 = query2.getResultSet();
+    if (rs1 == null || rs2 == null) {
+      exec.setSqlCode(-1);
+      return null;
+    }
+    boolean equal = true;
+    tests = 0;
+    failedTests = 0;
+    try {
+      ResultSetMetaData rm1 = rs1.getMetaData();
+      ResultSetMetaData rm2 = rs2.getMetaData();
+      int cnt1 = rm1.getColumnCount();
+      int cnt2 = rm2.getColumnCount();
+      tests = cnt1;
+      while (rs1.next() && rs2.next()) {
+        for (int i = 1; i <= tests; i++) {
+          Var v1 = new Var(Var.Type.DERIVED_TYPE);
+          Var v2 = new Var(Var.Type.DERIVED_TYPE);
+          v1.setValue(rs1, rm1, i);
+          if (i <= cnt2) {
+            v2.setValue(rs2, rm2, i);
+          }
+          boolean e = true;
+          if (!(v1.isNull() && v2.isNull()) && !v1.equals(v2)) {
+            equal = false;
+            e = false;
+            failedTests++;
+          }
+          if (trace || info) {
+            String m = rm1.getColumnName(i) + "\t" + v1.toString() + "\t" + v2.toString();
+            if (!e) {
+              m += "\tNot equal";
+              BigDecimal diff = v1.percentDiff(v2);
+              if (diff != null) {
+                if (diff.compareTo(BigDecimal.ZERO) != 0) {
+                  m += ", " + diff + "% difference";
+                  failedTestsHighDiff++;
+                }
+                else {
+                  m += ", less than 0.01% difference";
+                }
+              }
+              else {
+                failedTestsHighDiff++;
+              }
+            }
+            if (trace) {
+              trace(null, m);
+            }
+            else {
+              info(null, m);
+            }
+          }
+        }
+        if (equal) {
+          exec.setSqlSuccess();
+        }
+        else {
+          exec.setSqlCode(1);
+        }
+      }
+    }
+    catch(Exception e) {
+      exec.signal(e);
+      return null;
+    }
+    return new Boolean(equal);
+  }
+  
+  /**
+   * Define the SQL query to access data
+   */
+  String getSql(HplsqlParser.Cmp_stmtContext ctx, StringBuilder conn, int idx) throws Exception {
+    StringBuilder sql = new StringBuilder();
+    String table = null;
+    String query = null;
+    if (ctx.cmp_source(idx).table_name() != null) {
+      table = evalPop(ctx.cmp_source(idx).table_name()).toString();
+    }
+    else {
+      query = evalPop(ctx.cmp_source(idx).select_stmt()).toString();
+    }
+    if (ctx.cmp_source(idx).T_AT() != null) {
+      conn.append(ctx.cmp_source(idx).ident().getText());
+    }
+    else if (table != null) {
+      conn.append(exec.getObjectConnection(ctx.cmp_source(idx).table_name().getText()));
+    }  
+    else {
+      conn.append(exec.getStatementConnection());
+    }
+    sql.append("SELECT ");
+    sql.append(getSelectList(ctx, conn.toString(), table, query));
+    sql.append(" FROM ");
+    if (table != null) {
+      sql.append(table);
+      if (ctx.cmp_source(idx).where_clause() != null) {
+        sql.append(" " + evalPop(ctx.cmp_source(idx).where_clause()).toString());
+      }
+    }
+    else {
+      sql.append("(");
+      sql.append(query);
+      sql.append(") t");
+    }
+    return sql.toString();
+  }
+  
+  /**
+   * Define the SELECT list to access data
+   */
+  String getSelectList(HplsqlParser.Cmp_stmtContext ctx, String conn, String table, String query) throws Exception {
+    StringBuilder sql = new StringBuilder();
+    sql.append("COUNT(1) AS row_count");
+    if (ctx.T_SUM() != null && table != null) {
+      Row row = exec.meta.getRowDataType(ctx, conn, table);
+      if (row != null) {
+        ArrayList<Column> cols = row.getColumns();
+        int cnt = row.size();
+        sql.append(",\n");
+        for (int i = 0; i < cnt; i++) {
+          Column col = cols.get(i);
+          String name = col.getName();
+          Var.Type type = Var.defineType(col.getType());
+          sql.append("COUNT(" + name + ") AS " + name + "_COUNT_NOT_NULL");
+          if (type == Var.Type.STRING) {
+            sql.append(",\n");
+            sql.append("SUM(LENGTH(" + name + ")) AS " + name + "_SUM_LENGTH,\n");
+            sql.append("MIN(LENGTH(" + name + ")) AS " + name + "_MIN_LENGTH,\n");
+            sql.append("MAX(LENGTH(" + name + ")) AS " + name + "_MAX_LENGTH");
+          }
+          else if (type == Var.Type.BIGINT || type == Var.Type.DECIMAL || type == Var.Type.DOUBLE) {
+            sql.append(",\n");
+            sql.append("SUM(" + name + ") AS " + name + "_SUM,\n");
+            sql.append("MIN(" + name + ") AS " + name + "_MIN,\n");
+            sql.append("MAX(" + name + ") AS " + name + "_MAX");
+          }
+          else if (type == Var.Type.DATE || type == Var.Type.TIMESTAMP) {
+            sql.append(",\n");
+            sql.append("SUM(YEAR(" + name + ")) AS " + name + "_SUM_YEAR,\n");
+            sql.append("SUM(MONTH(" + name + ")) AS " + name + "_SUM_MONTH,\n");
+            sql.append("SUM(DAY(" + name + ")) AS " + name + "_SUM_DAY,\n");
+            sql.append("MIN(" + name + ") AS " + name + "_MIN,\n");
+            sql.append("MAX(" + name + ") AS " + name + "_MAX");
+          }
+          if (i + 1 < cnt) {
+            sql.append(",\n");
+          }
+        }
+      }
+    }
+    return sql.toString();
+  }
+  
+  /**
+   * Evaluate the expression and pop value from the stack
+   */
+  Var evalPop(ParserRuleContext ctx) {
+    exec.visit(ctx);
+    if (!exec.stack.isEmpty()) { 
+      return exec.stackPop();
+    }
+    return Var.Empty;
+  }
+
+  /**
+   * Trace and information
+   */
+  public void trace(ParserRuleContext ctx, String message) {
+    exec.trace(ctx, message);
+  }
+  
+  public void info(ParserRuleContext ctx, String message) {
+    exec.info(ctx, message);
+  }
+}
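
To make the comparison concrete: for CMP SUM over a hypothetical table t(id BIGINT,
name STRING), getSql()/getSelectList() above build one summary query per source,
roughly the string sketched below (the table and column names are invented; the
real output depends on the column types returned by Meta). The two queries are then
run on separate threads, and the single summary rows are compared column by column,
flagging values that differ by more than 0.01%.

  public class CmpSqlExample {
    public static void main(String[] args) {
      // Approximate shape of the summary query built for CMP SUM on t(id BIGINT, name STRING)
      String cmpSql =
          "SELECT COUNT(1) AS row_count,\n" +
          "COUNT(id) AS id_COUNT_NOT_NULL,\n" +
          "SUM(id) AS id_SUM,\n" +
          "MIN(id) AS id_MIN,\n" +
          "MAX(id) AS id_MAX,\n" +
          "COUNT(name) AS name_COUNT_NOT_NULL,\n" +
          "SUM(LENGTH(name)) AS name_SUM_LENGTH,\n" +
          "MIN(LENGTH(name)) AS name_MIN_LENGTH,\n" +
          "MAX(LENGTH(name)) AS name_MAX_LENGTH" +
          " FROM t";
      System.out.println(cmpSql);
    }
  }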

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Column.java b/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
index 252a870..e4e914c 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
@@ -27,9 +27,36 @@ public class Column {
   String type;
   Var value;
   
+  int len;
+  int scale;
+  
   Column(String name, String type) {
     this.name = name;
-    this.type = type;
+    len = 0;
+    scale = 0;
+    setType(type);
+  }
+  
+  /**
+   * Set the column type with its length/precision
+   */
+  void setType(String type) {
+    int open = type.indexOf('(');
+    if (open == -1) {
+      this.type = type;
+    }
+    else {
+      this.type = type.substring(0, open);
+      int comma = type.indexOf(',', open);
+      int close = type.indexOf(')', open);
+      if (comma == -1) {
+        len = Integer.parseInt(type.substring(open + 1, close));
+      }
+      else {
+        len = Integer.parseInt(type.substring(open + 1, comma));
+        scale = Integer.parseInt(type.substring(comma + 1, close));
+      }
+    }
   }
   
   /**
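
The new setType() above splits a declared type such as VARCHAR(100) or DECIMAL(10,2)
into its base name plus length and scale. A minimal standalone restatement of that
parsing, for illustration only (the class and field names here are made up):

  public class TypeSpec {
    String type;
    int len;
    int scale;

    // "DECIMAL(10,2)" -> type=DECIMAL, len=10, scale=2; "VARCHAR(100)" -> len=100, scale=0
    TypeSpec(String decl) {
      int open = decl.indexOf('(');
      if (open == -1) {
        type = decl;
        return;
      }
      type = decl.substring(0, open);
      int comma = decl.indexOf(',', open);
      int close = decl.indexOf(')', open);
      if (comma == -1) {
        len = Integer.parseInt(decl.substring(open + 1, close).trim());
      }
      else {
        len = Integer.parseInt(decl.substring(open + 1, comma).trim());
        scale = Integer.parseInt(decl.substring(comma + 1, close).trim());
      }
    }

    public static void main(String[] args) {
      TypeSpec t = new TypeSpec("DECIMAL(10,2)");
      System.out.println(t.type + " len=" + t.len + " scale=" + t.scale);   // DECIMAL len=10 scale=2
    }
  }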

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java b/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
index c8cc910..12f43c9 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
@@ -26,6 +26,7 @@ import java.sql.Connection;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.sql.ResultSet;
+import java.sql.PreparedStatement;
 
 public class Conn {
  
@@ -76,6 +77,25 @@ public class Conn {
   }
   
   /**
+   * Prepare a SQL query
+   */
+  public Query prepareQuery(Query query, String connName) {
+    try {
+      Connection conn = getConnection(connName);
+      timer.start();
+      PreparedStatement stmt = conn.prepareStatement(query.sql);
+      timer.stop();
+      query.set(conn, stmt);      
+      if (info) {
+        exec.info(null, "Statement prepared successfully (" + timer.format() + ")");
+      }      
+    } catch (Exception e) {
+      query.setError(e);
+    }
+    return query;
+  }
+  
+  /**
    * Execute a SQL statement
    */
   public Query executeSql(String sql, String connName) {
@@ -117,6 +137,7 @@ public class Conn {
     if (sqls != null) {
       Statement s = conn.createStatement();
       for (String sql : sqls) {
+        exec.info(null, "Starting pre-SQL statement");
         s.execute(sql);
       }
       s.close();

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
index 30b98ca..9968b24 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
@@ -26,6 +26,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.List;
 import java.io.FileOutputStream;
+import java.io.OutputStream;
 import java.io.IOException;
 
 import org.apache.hadoop.fs.FileSystem;
@@ -165,8 +166,8 @@ public class Copy {
     exec.returnConnection(targetConn, conn);
     exec.setRowCount(rows);
     long elapsed = timer.stop();
-    if (trace) {
-      trace(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", " + rows/(elapsed/1000) + " rows/sec");
+    if (info) {
+      info(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", " + rows/(elapsed/1000) + " rows/sec");
     }
   }
   
@@ -192,16 +193,35 @@ public class Copy {
     byte[] nullstr = "NULL".getBytes();
     int cols = rm.getColumnCount();
     int rows = 0;
-    if (trace) {
-      trace(ctx, "SELECT executed: " + cols + " columns, output file: " + filename);
+    if (trace || info) {
+      String mes = "Query executed: " + cols + " columns, output file: " + filename;
+      if (trace) {
+        trace(ctx, mes);
+      }
+      else {
+        info(ctx, mes);
+      }
     } 
-    java.io.File file = new java.io.File(filename);
-    FileOutputStream out = null;
+    java.io.File file = null;
+    File hdfsFile = null;
+    if (ctx.T_HDFS() == null) {
+      file = new java.io.File(filename);
+    }
+    else {
+      hdfsFile = new File();
+    }     
+    OutputStream out = null;
+    timer.start();
     try {      
-      if (!file.exists()) {
-        file.createNewFile();
+      if (file != null) {
+        if (!file.exists()) {
+          file.createNewFile();
+        }
+        out = new FileOutputStream(file, false /*append*/);
+      }
+      else {
+        out = hdfsFile.create(filename, true /*overwrite*/);
       }
-      out = new FileOutputStream(file, false /*append*/);
       String col;
       String sql = "";
       if (sqlInsert) {
@@ -237,8 +257,9 @@ public class Copy {
         out.close();
       }
     }
-    if (trace) {
-      trace(ctx, "COPY rows: " + rows);
+    long elapsed = timer.stop();
+    if (info) {
+      info(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", " + rows/(elapsed/1000) + " rows/sec");
     }
   }
   
@@ -376,7 +397,12 @@ public class Copy {
       }
       else if (option.T_AT() != null) {
         targetConn = option.ident().getText();
-        sqlInsertName = ctx.copy_target().ident().getText();
+        if (ctx.copy_target().expr() != null) {
+          sqlInsertName = evalPop(ctx.copy_target().expr()).toString();
+        }
+        else {
+          sqlInsertName = ctx.copy_target().getText();
+        }
       }
       else if (option.T_BATCHSIZE() != null) {
         batchSize = evalPop(option.expr()).intValue();

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
index f5592e1..38b5380 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
@@ -506,6 +506,24 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
   public Query executeQuery(ParserRuleContext ctx, String sql, String connProfile) {
     return executeQuery(ctx, new Query(sql), connProfile);
   }
+  
+  /**
+   * Prepare a SQL query (SELECT)
+   */
+  public Query prepareQuery(ParserRuleContext ctx, Query query, String connProfile) {
+    if (!exec.offline) {
+      exec.rowCount = 0;
+      exec.conn.prepareQuery(query, connProfile);
+      return query;
+    }
+    setSqlNoData();
+    info(ctx, "Not executed - offline mode set");
+    return query;
+  }
+
+  public Query prepareQuery(ParserRuleContext ctx, String sql, String connProfile) {
+    return prepareQuery(ctx, new Query(sql), connProfile);
+  }
 
   /**
    * Execute a SQL statement 
@@ -950,6 +968,11 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
   }
   
   @Override 
+  public Integer visitFrom_subselect_clause(HplsqlParser.From_subselect_clauseContext ctx) { 
+    return exec.select.fromSubselect(ctx); 
+  }
+  
+  @Override 
   public Integer visitFrom_join_clause(HplsqlParser.From_join_clauseContext ctx) { 
     return exec.select.fromJoin(ctx); 
   }
@@ -1162,6 +1185,14 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
   }
   
   /**
+   * CMP statement
+   */
+  @Override 
+  public Integer visitCmp_stmt(HplsqlParser.Cmp_stmtContext ctx) { 
+    return new Cmp(exec).run(ctx); 
+  }
+  
+  /**
    * COPY statement
    */
   @Override 
@@ -1926,8 +1957,13 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
    */
   @Override 
   public Integer visitDate_literal(HplsqlParser.Date_literalContext ctx) { 
-    String str = evalPop(ctx.string()).toString();
-    stackPush(new Var(Var.Type.DATE, Utils.toDate(str))); 
+    if (!exec.buildSql) {
+      String str = evalPop(ctx.string()).toString();
+      stackPush(new Var(Var.Type.DATE, Utils.toDate(str)));
+    }
+    else {
+      stackPush(getFormattedText(ctx));
+    }
     return 0; 
   }
 
@@ -1936,16 +1972,21 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
    */
   @Override 
   public Integer visitTimestamp_literal(HplsqlParser.Timestamp_literalContext ctx) { 
-    String str = evalPop(ctx.string()).toString();
-    int len = str.length();
-    int precision = 0;
-    if (len > 19 && len <= 29) {
-      precision = len - 20;
-      if (precision > 3) {
-        precision = 3;
+    if (!exec.buildSql) {
+      String str = evalPop(ctx.string()).toString();
+      int len = str.length();
+      int precision = 0;
+      if (len > 19 && len <= 29) {
+        precision = len - 20;
+        if (precision > 3) {
+          precision = 3;
+        }
       }
+      stackPush(new Var(Utils.toTimestamp(str), precision));
+    }
+    else {
+      stackPush(getFormattedText(ctx));
     }
-    stackPush(new Var(Utils.toTimestamp(str), precision)); 
     return 0; 
   }
   
@@ -1979,6 +2020,9 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
    * @throws Exception 
    */
   Connection getConnection(String conn) throws Exception {
+    if (conn == null || conn.equalsIgnoreCase("default")) {
+      conn = exec.conf.defaultConnection;
+    }
     return exec.conn.getConnection(conn);
   }
   
@@ -1993,7 +2037,7 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
    * Define the database type by profile name
    */
   Conn.Type getConnectionType(String conn) {
-    return exec.conn.getType(conn);
+    return exec.conn.getTypeByProfile(conn);
   }
   
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java b/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
index 7269798..7c500a8 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
@@ -49,6 +49,9 @@ public class Expression {
       else if (ctx.T_SUB() != null) {
         operatorSub(ctx); 
       }
+      else if (ctx.T_MUL() != null) {
+        operatorMultiply(ctx); 
+      }
       else if (ctx.T_DIV() != null) {
         operatorDiv(ctx); 
       }
@@ -98,16 +101,17 @@ public class Expression {
    * Evaluate a boolean expression
    */
   public void execBool(HplsqlParser.Bool_exprContext ctx) {
-    if (ctx.T_OPEN_P() != null) {
-      eval(ctx.bool_expr(0));
-      return;
-    }
-    else if (ctx.bool_expr_atom() != null) {
+    if (ctx.bool_expr_atom() != null) {
       eval(ctx.bool_expr_atom());
       return;
     }
     Var result = evalPop(ctx.bool_expr(0));
-    if (ctx.bool_expr_logical_operator() != null) {
+    if (ctx.T_OPEN_P() != null) {
+      if (ctx.T_NOT() != null) {
+        result.negate();
+      }
+    }
+    else if (ctx.bool_expr_logical_operator() != null) {
       if (ctx.bool_expr_logical_operator().T_AND() != null) {
         if (result.isTrue()) {
           result = evalPop(ctx.bool_expr(1));
@@ -359,6 +363,23 @@ public class Expression {
   }
   
   /**
+   * Multiplication operator
+   */
+  public void operatorMultiply(HplsqlParser.ExprContext ctx) {
+    Var v1 = evalPop(ctx.expr(0));
+    Var v2 = evalPop(ctx.expr(1));
+    if (v1.value == null || v2.value == null) {
+      evalNull();
+    }
+    else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) {
+      exec.stackPush(new Var((Long)v1.value * (Long)v2.value)); 
+    }
+    else {
+      exec.signal(Signal.Type.UNSUPPORTED_OPERATION, "Unsupported data types in multiplication operator");
+    }
+  }
+  
+  /**
    * Division operator
    */
   public void operatorDiv(HplsqlParser.ExprContext ctx) {

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/File.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/File.java b/hplsql/src/main/java/org/apache/hive/hplsql/File.java
index 6a8ddfe..e748772 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/File.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/File.java
@@ -46,17 +46,27 @@ public class File {
   /**
    * Create a file
    */
-  public void create(String dir, String file, boolean overwrite) {
-    path = new Path(dir, file);
+  public FSDataOutputStream create(boolean overwrite) {
     try {
       if (fs == null) {
-        fs = FileSystem.get(new Configuration());
+        fs = createFs();
       }
       out = fs.create(path, overwrite);
     } 
     catch (IOException e) {
       e.printStackTrace();
     }
+    return out;
+  }
+  
+  public FSDataOutputStream create(String dir, String file, boolean overwrite) {
+    path = new Path(dir, file);
+    return create(overwrite);
+  }
+
+  public FSDataOutputStream create(String file, boolean overwrite) {
+    path = new Path(file);
+    return create(overwrite);
   }
   
   /**
@@ -66,7 +76,7 @@ public class File {
    path = new Path(dir, file);
    try {
      if (fs == null) {
-       fs = FileSystem.get(new Configuration());
+       fs = createFs();
      }
      in = fs.open(path);
    } catch (IOException e) {
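
The new create() overloads above return an FSDataOutputStream so that Copy can
stream rows directly to HDFS. A minimal sketch of the same write path using the
standard Hadoop FileSystem API (the target path is made up, and a Hadoop
configuration is assumed to be available on the classpath):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FSDataOutputStream;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class HdfsWriteExample {
    public static void main(String[] args) throws Exception {
      FileSystem fs = FileSystem.get(new Configuration());
      // Overwrite the target file if it already exists, as COPY ... TO HDFS does
      FSDataOutputStream out = fs.create(new Path("/tmp/orders_export"), true);
      out.write("1|Hello\n".getBytes());
      out.close();
      fs.close();
    }
  }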

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java b/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
index 485bcdf..2e04ef9 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
@@ -18,7 +18,9 @@
 
 package org.apache.hive.hplsql;
 
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
 import java.util.ArrayList;
 import java.util.HashMap;
 
@@ -90,12 +92,9 @@ public class Meta {
    */
   Row readColumns(ParserRuleContext ctx, String conn, String table, HashMap<String, Row> map) {
     Row row = null;
-    String sql = null;
     Conn.Type connType = exec.getConnectionType(conn); 
     if (connType == Conn.Type.HIVE) {
-      sql = "DESCRIBE " + table;
-    }
-    if (sql != null) {
+      String sql = "DESCRIBE " + table;
       Query query = new Query(sql);
       exec.executeQuery(ctx, query, conn); 
       if (!query.error()) {
@@ -115,6 +114,27 @@ public class Meta {
       }
       exec.closeQuery(query, conn);
     }
+    else {
+      Query query = exec.prepareQuery(ctx, "SELECT * FROM " + table, conn); 
+      if (!query.error()) {
+        try {
+          PreparedStatement stmt = query.getPreparedStatement();
+          ResultSetMetaData rm = stmt.getMetaData();
+          int cols = rm.getColumnCount();
+          for (int i = 1; i <= cols; i++) {
+            String col = rm.getColumnName(i);
+            String typ = rm.getColumnTypeName(i);
+            if (row == null) {
+              row = new Row();
+            }
+            row.addColumn(col.toUpperCase(), typ);
+          }
+          map.put(table, row);
+        }
+        catch (Exception e) {}
+      }
+      exec.closeQuery(query, conn);
+    }
     return row;
   }
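
For non-Hive connections, the new branch above reads column names and types without
executing the query, by preparing a SELECT * over the table and asking the
PreparedStatement for its ResultSetMetaData. A standalone JDBC sketch of the same
technique (the connection URL and table name are invented, and some drivers only
return metadata after execution):

  import java.sql.Connection;
  import java.sql.DriverManager;
  import java.sql.PreparedStatement;
  import java.sql.ResultSetMetaData;

  public class DescribeViaPrepare {
    public static void main(String[] args) throws Exception {
      Connection conn = DriverManager.getConnection("jdbc:db2://host:50000/sample", "user", "pwd");
      PreparedStatement stmt = conn.prepareStatement("SELECT * FROM orders");
      ResultSetMetaData rm = stmt.getMetaData();      // no execution required
      for (int i = 1; i <= rm.getColumnCount(); i++) {
        System.out.println(rm.getColumnName(i) + " " + rm.getColumnTypeName(i));
      }
      stmt.close();
      conn.close();
    }
  }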
   

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Query.java b/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
index 08cd6a7..e196f86 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
@@ -21,6 +21,7 @@ package org.apache.hive.hplsql;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.PreparedStatement;
 import java.sql.Statement;
 
 import org.antlr.v4.runtime.ParserRuleContext;
@@ -32,6 +33,7 @@ public class Query {
   
   Connection conn;
   Statement stmt;
+  PreparedStatement pstmt;
   ResultSet rs;
   Exception exception;
 
@@ -59,6 +61,11 @@ public class Query {
     }
   }
   
+  public void set(Connection conn, PreparedStatement pstmt) {
+    this.conn = conn;
+    this.pstmt = pstmt;
+  }
+  
   /**
    * Set the fetch status
    */
@@ -132,6 +139,10 @@ public class Query {
         stmt.close();
         stmt = null;
       }
+      if(pstmt != null) {
+        pstmt.close();
+        pstmt = null;
+      }
       state = State.CLOSE;
     } catch (SQLException e) {
       e.printStackTrace();
@@ -190,6 +201,13 @@ public class Query {
   }
   
   /**
+   * Get the prepared statement object
+   */
+  public PreparedStatement getPreparedStatement() {
+    return pstmt;
+  }
+  
+  /**
    * Get the connection object
    */
   public Connection getConnection() {

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Select.java b/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
index 71ca848..56fbb05 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
@@ -218,7 +218,10 @@ public class Select {
       sql.append(" " + getText(ctx.order_by_clause()));
     }
     if (ctx.select_options() != null) {
-      sql.append(" " + evalPop(ctx.select_options()));
+      Var opt = evalPop(ctx.select_options());
+      if (!opt.isNull()) {
+        sql.append(" " + opt.toString());
+      }
     }
     if (ctx.select_list().select_list_limit() != null) {
       sql.append(" LIMIT " + evalPop(ctx.select_list().select_list_limit().expr()));
@@ -281,6 +284,21 @@ public class Select {
     exec.stackPush(sql);
     return 0; 
   }
+  
+  /**
+   * Subselect in FROM
+   */
+  public Integer fromSubselect(HplsqlParser.From_subselect_clauseContext ctx) {     
+    StringBuilder sql = new StringBuilder();
+    sql.append("(");
+    sql.append(evalPop(ctx.select_stmt()).toString());
+    sql.append(")");
+    if (ctx.from_alias_clause() != null) {
+      sql.append(" ").append(exec.getText(ctx.from_alias_clause()));
+    }
+    exec.stackPush(sql);
+    return 0; 
+  }
  
   /**
    * JOIN clause in FROM
@@ -341,10 +359,13 @@ public class Select {
    * WHERE clause
    */
   public Integer where(HplsqlParser.Where_clauseContext ctx) { 
+    boolean oldBuildSql = exec.buildSql; 
+    exec.buildSql = true;
     StringBuilder sql = new StringBuilder();
     sql.append(ctx.T_WHERE().getText());
     sql.append(" " + evalPop(ctx.bool_expr()));
     exec.stackPush(sql);
+    exec.buildSql = oldBuildSql;
     return 0;
   }
   

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java b/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
index 6193f49..db9ea65 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
@@ -435,7 +435,13 @@ public class Stmt {
    * INCLUDE statement
    */
   public Integer include(HplsqlParser.Include_stmtContext ctx) {
-    String file = ctx.file_name().getText();
+    String file;
+    if (ctx.file_name() != null) {
+      file = ctx.file_name().getText();
+    }
+    else {
+      file = evalPop(ctx.expr()).toString();
+    }    
     trace(ctx, "INCLUDE " + file);
     exec.includeFile(file);
     return 0; 

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Var.java b/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
index b31a14d..150e8b4 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
@@ -19,6 +19,7 @@
 package org.apache.hive.hplsql;
 
 import java.math.BigDecimal;
+import java.math.RoundingMode;
 import java.util.ArrayList;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
@@ -32,7 +33,7 @@ import java.sql.Timestamp;
 public class Var {
 
 	// Data types
-	public enum Type {BOOL, CURSOR, DATE, DECIMAL, DERIVED_TYPE, DERIVED_ROWTYPE, FILE, IDENT, BIGINT, INTERVAL, ROW, 
+	public enum Type {BOOL, CURSOR, DATE, DECIMAL, DERIVED_TYPE, DERIVED_ROWTYPE, DOUBLE, FILE, IDENT, BIGINT, INTERVAL, ROW, 
 	                  RS_LOCATOR, STRING, STRINGLIST, TIMESTAMP, NULL};
 	public static final String DERIVED_TYPE = "DERIVED%TYPE";
 	public static final String DERIVED_ROWTYPE = "DERIVED%ROWTYPE";
@@ -79,6 +80,11 @@ public class Var {
     this.value = value;
   }
   
+	public Var(Double value) {
+    this.type = Type.DOUBLE;
+    this.value = value;
+  }
+	
 	public Var(Date value) {
     this.type = Type.DATE;
     this.value = value;
@@ -169,6 +175,9 @@ public class Var {
 	    if (val.type == Type.BIGINT) {
 	      value = BigDecimal.valueOf(val.longValue());
 	    }
+	    else if (val.type == Type.DOUBLE) {
+	      value = BigDecimal.valueOf(val.doubleValue());
+	    }
 	  }
 	  else if (type == Type.DATE) {
 	    value = Utils.toDate(val.toString());
@@ -238,6 +247,9 @@ public class Var {
     else if (type == java.sql.Types.DECIMAL || type == java.sql.Types.NUMERIC) {
       cast(new Var(rs.getBigDecimal(idx)));
     }
+    else if (type == java.sql.Types.FLOAT || type == java.sql.Types.DOUBLE) {
+      cast(new Var(new Double(rs.getDouble(idx))));
+    }
     return this;
   }
   
@@ -287,6 +299,9 @@ public class Var {
     else if (type.equalsIgnoreCase("DEC") || type.equalsIgnoreCase("DECIMAL") || type.equalsIgnoreCase("NUMERIC")) {
       return Type.DECIMAL;
     }
+    else if (type.equalsIgnoreCase("FLOAT") || type.toUpperCase().startsWith("DOUBLE")) {
+      return Type.DOUBLE;
+    }
     else if (type.equalsIgnoreCase("DATE")) {
       return Type.DATE;
     }
@@ -332,34 +347,59 @@ public class Var {
     scale = 0;
 	}
 	
-	/*
+	/**
 	 * Compare values
 	 */
 	@Override
   public boolean equals(Object obj) {
-	  if (this == obj) {
+	  if (obj == null || getClass() != obj.getClass()) {
+      return false;
+    }    
+	  Var var = (Var)obj;  
+	  if (this == var) {
       return true;
 	  }
-	  else if (obj == null || this.value == null) {
+	  else if (var == null || var.value == null || this.value == null) {
       return false;
     }
-	  else if (getClass() != obj.getClass()) {
-      return false;
-	  }
-	  
-    Var var = (Var)obj;    
-    if (type == Type.BIGINT && var.type == Type.BIGINT &&
-       ((Long)value).longValue() == ((Long)var.value).longValue()) {
-      return true;
+    if (type == Type.BIGINT) {
+      if (var.type == Type.BIGINT && ((Long)value).longValue() == ((Long)var.value).longValue()) {
+        return true;
+      }
+      else if (var.type == Type.DECIMAL) {
+        return equals((BigDecimal)var.value, (Long)value);
+      }
     }
     else if (type == Type.STRING && var.type == Type.STRING &&
             ((String)value).equals((String)var.value)) {
       return true;
     }
+    else if (type == Type.DECIMAL && var.type == Type.DECIMAL &&
+            ((BigDecimal)value).compareTo((BigDecimal)var.value) == 0) {
+      return true;
+    }
+    else if (type == Type.DOUBLE) {
+      if (var.type == Type.DOUBLE && ((Double)value).compareTo((Double)var.value) == 0) {
+        return true;
+      }
+      else if (var.type == Type.DECIMAL && ((Double)value).compareTo(((BigDecimal)var.value).doubleValue()) == 0) {
+        return true;
+      }
+    }
     return false;
 	}
+    
+  /**
+   * Check if variables of different data types are equal
+   */
+  public boolean equals(BigDecimal d, Long i) {
+    if (d.compareTo(new BigDecimal(i)) == 0) {
+      return true;
+    }
+    return false;
+  }
 	
-	/*
+	/**
    * Compare values
    */
   public int compareTo(Var v) {
@@ -377,6 +417,20 @@ public class Var {
     }
     return -1;
   }
+  
+  /**
+   * Calculate difference between values in percent
+   */
+  public BigDecimal percentDiff(Var var) {
+    BigDecimal d1 = new Var(Var.Type.DECIMAL).cast(this).decimalValue();
+    BigDecimal d2 = new Var(Var.Type.DECIMAL).cast(var).decimalValue();
+    if (d1 != null && d2 != null) {
+      if (d1.compareTo(BigDecimal.ZERO) != 0) {
+        return d1.subtract(d2).abs().multiply(new BigDecimal(100)).divide(d1, 2, RoundingMode.HALF_UP);
+      }
+    }
+    return null;
+  }
 	
 	 /**
    * Increment an integer value
@@ -417,6 +471,26 @@ public class Var {
     }
     return -1;
   }
+  
+  /**
+   * Return a decimal value
+   */
+  public BigDecimal decimalValue() {
+    if (type == Type.DECIMAL) {
+      return (BigDecimal)value;
+    }
+    return null;
+  }
+  
+  /**
+   * Return a double value
+   */
+  public double doubleValue() {
+    if (type == Type.DOUBLE) {
+      return ((Double)value).doubleValue();
+    }
+    return -1;
+  }
 	
 	/**
 	 * Return true/false for BOOL type
@@ -429,6 +503,16 @@ public class Var {
 	}
 	
 	/**
+	 * Negate the boolean value
+	 */
+	public void negate() {
+    if(type == Type.BOOL && value != null) {
+      boolean v = ((Boolean)value).booleanValue();
+      value = Boolean.valueOf(!v);
+    }
+  }
+	
+	/**
 	 * Check if the variable contains NULL
 	 */
 	public boolean isNull() {
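
percentDiff() above computes |d1 - d2| * 100 / d1 rounded to two decimal places,
which Cmp uses to flag failed tests that differ by more than 0.01%. A small worked
example of the same BigDecimal arithmetic (the values are made up):

  import java.math.BigDecimal;
  import java.math.RoundingMode;

  public class PercentDiffExample {
    static BigDecimal percentDiff(BigDecimal d1, BigDecimal d2) {
      if (d1.compareTo(BigDecimal.ZERO) == 0) {
        return null;                          // undefined when the reference value is zero
      }
      return d1.subtract(d2).abs().multiply(new BigDecimal(100)).divide(d1, 2, RoundingMode.HALF_UP);
    }

    public static void main(String[] args) {
      // |1000 - 999| * 100 / 1000 = 0.10 percent
      System.out.println(percentDiff(new BigDecimal("1000"), new BigDecimal("999")));
    }
  }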

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
index ae7acae..aa40a0a 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
@@ -88,7 +88,7 @@ public class Function {
     if (trace && ctx.parent.parent instanceof HplsqlParser.Expr_stmtContext) {
       trace(ctx, "FUNC " + name);      
     }
-    FuncCommand func = map.get(name);    
+    FuncCommand func = map.get(name.toUpperCase());    
     if (func != null) {
       func.run(ctx);
     }    
@@ -693,6 +693,10 @@ public class Function {
     exec.stackPush(new Var(i)); 
   }
   
+  void evalInt(int i) {
+    evalInt(new Long(i));
+  }
+  
   /**
    * Evaluate the expression to specified Date value
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
index e022024..091552f 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
@@ -18,6 +18,12 @@
 
 package org.apache.hive.hplsql.functions;
 
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.hive.hplsql.*;
 
 public class FunctionMisc extends Function {
@@ -34,11 +40,13 @@ public class FunctionMisc extends Function {
     f.map.put("DECODE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { decode(ctx); }});
     f.map.put("NVL", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl(ctx); }});
     f.map.put("NVL2", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl2(ctx); }});
+    f.map.put("PART_COUNT_BY", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { partCountBy(ctx); }});
     
     f.specMap.put("ACTIVITY_COUNT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { activityCount(ctx); }});
     f.specMap.put("CAST", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { cast(ctx); }});
     f.specMap.put("CURRENT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { current(ctx); }});
     f.specMap.put("CURRENT_USER", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }});
+    f.specMap.put("PART_COUNT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { partCount(ctx); }});
     f.specMap.put("USER", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }});
 
     f.specSqlMap.put("CURRENT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentSql(ctx); }});
@@ -185,4 +193,117 @@ public class FunctionMisc extends Function {
       evalNull();
     }
   }
+  
+  /**
+   * PART_COUNT function
+   */
+  public void partCount(HplsqlParser.Expr_spec_funcContext ctx) {
+    String tabname = evalPop(ctx.expr(0)).toString();
+    StringBuilder sql = new StringBuilder();
+    sql.append("SHOW PARTITIONS ");
+    sql.append(tabname);    
+    int cnt = ctx.expr().size();   
+    if (cnt > 1) {
+      sql.append(" PARTITION (");
+      int i = 1;
+      while (i + 1 < cnt) {
+        String col = evalPop(ctx.expr(i)).toString();
+        String val = evalPop(ctx.expr(i + 1)).toSqlString();
+        if (i > 2) {
+          sql.append(", ");
+        }
+        sql.append(col);
+        sql.append("=");
+        sql.append(val);        
+        i += 2;
+      }
+      sql.append(")");
+    }
+    if (trace) {
+      trace(ctx, "Query: " + sql);
+    }
+    if (exec.getOffline()) {
+      evalNull();
+      return;
+    }
+    Query query = exec.executeQuery(ctx, sql.toString(), exec.conf.defaultConnection);
+    if (query.error()) {
+      evalNullClose(query, exec.conf.defaultConnection);
+      return;
+    }
+    int result = 0;
+    ResultSet rs = query.getResultSet();
+    try {
+      while (rs.next()) {
+        result++;
+      }
+    } catch (SQLException e) {
+      evalNullClose(query, exec.conf.defaultConnection);
+      return;
+    }
+    evalInt(result);
+    exec.closeQuery(query, exec.conf.defaultConnection);
+  }
+  
+  /**
+   * PART_COUNT_BY function
+   */
+  public void partCountBy(HplsqlParser.Expr_func_paramsContext ctx) {
+    int cnt = ctx.func_param().size();
+    if (cnt < 1 || exec.getOffline()) {
+      return;
+    }
+    String tabname = evalPop(ctx.func_param(0).expr()).toString();
+    ArrayList<String> keys = null;
+    if (cnt > 1) {
+      keys = new ArrayList<String>();
+      for (int i = 1; i < cnt; i++) {
+        keys.add(evalPop(ctx.func_param(i).expr()).toString().toUpperCase());
+      }
+    }    
+    String sql = "SHOW PARTITIONS " + tabname;
+    Query query = exec.executeQuery(ctx, sql, exec.conf.defaultConnection);
+    if (query.error()) {
+      exec.closeQuery(query, exec.conf.defaultConnection);
+      return;
+    }
+    ResultSet rs = query.getResultSet();
+    HashMap<String, Integer> group = new HashMap<String, Integer>();
+    try {
+      while (rs.next()) {
+        String part = rs.getString(1);
+        String[] parts = part.split("/");
+        String key = parts[0];
+        if (cnt > 1) {
+          StringBuilder k = new StringBuilder();
+          for (int i = 0; i < parts.length; i++) {
+            if (keys.contains(parts[i].split("=")[0].toUpperCase())) {
+              if (k.length() > 0) {
+                k.append("/");
+              }
+              k.append(parts[i]);
+            }
+          }
+          key = k.toString();
+        }
+        Integer count = group.get(key);
+        if (count == null) {
+          count = new Integer(0); 
+        }
+        group.put(key, count + 1);        
+      }
+    } catch (SQLException e) {
+      exec.closeQuery(query, exec.conf.defaultConnection);
+      return;
+    }
+    if (cnt == 1) {
+      evalInt(group.size());
+    }
+    else {
+      for (Map.Entry<String, Integer> i : group.entrySet()) {
+        System.out.println(i.getKey() + '\t' + i.getValue());
+      }
+    }
+    exec.closeQuery(query, exec.conf.defaultConnection);
+  }
 }
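
The partCountBy hunk above groups SHOW PARTITIONS output by the requested partition keys before counting. Below is a self-contained sketch of just that grouping step, using the five partition strings that appear later in part_count_by.out.txt in place of the ResultSet the committed code reads from:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class PartCountBySketch {
      public static void main(String[] args) {
        // Stand-in for SHOW PARTITIONS output (values taken from part_count_by.out.txt)
        List<String> partitions = Arrays.asList(
            "dt=2000-01-01/region=1", "dt=2000-01-01/region=2",
            "dt=2013-08-08/region=1", "dt=2013-08-08/region=10",
            "dt=2013-12-10/region=2020-20-20");
        List<String> keys = Arrays.asList("REGION");   // keys passed to PART_COUNT_BY
        Map<String, Integer> group = new HashMap<String, Integer>();
        for (String part : partitions) {
          StringBuilder k = new StringBuilder();
          for (String level : part.split("/")) {
            // Keep only the levels whose key name was requested
            if (keys.contains(level.split("=")[0].toUpperCase())) {
              if (k.length() > 0) {
                k.append("/");
              }
              k.append(level);
            }
          }
          Integer count = group.get(k.toString());
          group.put(k.toString(), count == null ? 1 : count + 1);
        }
        for (Map.Entry<String, Integer> e : group.entrySet()) {
          System.out.println(e.getKey() + '\t' + e.getValue());
        }
      }
    }

With REGION as the only key this prints the same region counts as the expected output file (region=1 twice, the others once), modulo HashMap iteration order.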

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
----------------------------------------------------------------------
diff --git a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
index 6a67cd0..8299828 100644
--- a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
+++ b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
@@ -94,6 +94,10 @@ public class TestHplsqlLocal {
   }
   
   @Test
+  public void testCreateProcedure2() throws Exception {
+    run("create_procedure2");
+  }
+  @Test
   public void testCreateProcedureNoParams() throws Exception {
     run("create_procedure_no_params");
   }
@@ -162,8 +166,17 @@ public class TestHplsqlLocal {
   public void testIf() throws Exception {
     run("if");
   }
+  
+  @Test
+  public void testIf2() throws Exception {
+    run("if2");
+  }
 
   @Test
+  public void testInclude() throws Exception {
+    run("include");
+  }
+  @Test
   public void testInstr() throws Exception {
     run("instr");
   }
@@ -199,6 +212,11 @@ public class TestHplsqlLocal {
   }
 
   @Test
+  public void testMultDiv() throws Exception {
+    run("mult_div");
+  }
+
+  @Test
   public void testNvl() throws Exception {
     run("nvl");
   }
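
Each new @Test above feeds the script of the same name from src/test/queries/local to the interpreter through the run(name) helper, which is outside this hunk. As a sketch only, and assuming Exec accepts CLI-style arguments (-f, -trace) through run(String[]) the way the command-line tool does, an equivalent standalone invocation would look like:

    import org.apache.hive.hplsql.Exec;

    public class RunScriptSketch {
      public static void main(String[] args) throws Exception {
        // Assumption: Exec takes the same -f/-trace arguments as the hplsql CLI
        Exec exec = new Exec();
        exec.run(new String[] { "-f", "src/test/queries/local/mult_div.sql", "-trace" });
        // The JUnit harness is assumed to also capture stdout and diff it against
        // the matching file under src/test/results/local (e.g. mult_div.out.txt).
      }
    }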

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
----------------------------------------------------------------------
diff --git a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
index eeaa395..55238ed 100644
--- a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
+++ b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
@@ -43,6 +43,11 @@ public class TestHplsqlOffline {
     run("create_table_ora");
   }
   
+  @Test
+  public void testSelectDb2() throws Exception {
+    run("select_db2");
+  }
+
   /**
    * Run a test file
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/cmp_row_count.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/cmp_row_count.sql b/hplsql/src/test/queries/db/cmp_row_count.sql
new file mode 100644
index 0000000..b33d841
--- /dev/null
+++ b/hplsql/src/test/queries/db/cmp_row_count.sql
@@ -0,0 +1,4 @@
+cmp row_count src, src at hive2conn;
+cmp row_count src where 1=1, src at hive2conn;
+cmp row_count (select 'A' from src), src where 2=2 at hive2conn;
+
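
cmp row_count counts the rows of both operands, evaluating the second one AT the named connection (hive2conn), and prints the two counts side by side; the generated SELECT COUNT(1) statements are visible in cmp_row_count.out.txt further down. A rough JDBC sketch of the same comparison, with placeholder connection URLs rather than HPL/SQL's configured profiles:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class CmpRowCountSketch {
      static long rowCount(Connection conn, String sql) throws Exception {
        try (Statement st = conn.createStatement(); ResultSet rs = st.executeQuery(sql)) {
          return rs.next() ? rs.getLong(1) : 0L;
        }
      }

      public static void main(String[] args) throws Exception {
        // Placeholder URLs; HPL/SQL resolves the actual connections from its configuration
        try (Connection c1 = DriverManager.getConnection("jdbc:hive2://host1:10000/default");
             Connection c2 = DriverManager.getConnection("jdbc:hive2://host2:10000/default")) {
          long cnt1 = rowCount(c1, "SELECT COUNT(1) AS row_count FROM src");
          long cnt2 = rowCount(c2, "SELECT COUNT(1) AS row_count FROM src");
          System.out.println("row_count\t" + cnt1 + "\t" + cnt2 + "\tequal: " + (cnt1 == cnt2));
        }
      }
    }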

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/cmp_sum.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/cmp_sum.sql b/hplsql/src/test/queries/db/cmp_sum.sql
new file mode 100644
index 0000000..32347e1
--- /dev/null
+++ b/hplsql/src/test/queries/db/cmp_sum.sql
@@ -0,0 +1,3 @@
+cmp sum src_dt, src_dt at hive2conn;
+cmp sum src_dt where 1=1, src_dt at hive2conn;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/copy_to_file.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/copy_to_file.sql b/hplsql/src/test/queries/db/copy_to_file.sql
new file mode 100644
index 0000000..6135471
--- /dev/null
+++ b/hplsql/src/test/queries/db/copy_to_file.sql
@@ -0,0 +1,2 @@
+copy src to target/tmp/src.txt;
+copy (select * from src) to target/tmp/src2.txt sqlinsert src2;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/copy_to_hdfs.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/copy_to_hdfs.sql b/hplsql/src/test/queries/db/copy_to_hdfs.sql
new file mode 100644
index 0000000..fd01d7b
--- /dev/null
+++ b/hplsql/src/test/queries/db/copy_to_hdfs.sql
@@ -0,0 +1,2 @@
+--copy src to hdfs src.txt;
+copy (select * from src) to hdfs /user/hplsql/src2.txt delimiter '\01';
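
copy ... to hdfs streams the query's rows into an HDFS file with the given field delimiter ('\01' here). Below is a cut-down sketch of such a write using the stock Hadoop FileSystem API; the JDBC URL and output path are placeholders, and this is an illustration rather than the committed Copy.java logic:

    import java.nio.charset.StandardCharsets;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.ResultSetMetaData;
    import java.sql.Statement;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class CopyToHdfsSketch {
      public static void main(String[] args) throws Exception {
        Connection conn = DriverManager.getConnection("jdbc:hive2://host:10000/default"); // placeholder
        FileSystem fs = FileSystem.get(new Configuration());
        try (Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("select * from src");
             FSDataOutputStream out = fs.create(new Path("/user/hplsql/src2.txt"))) {
          ResultSetMetaData meta = rs.getMetaData();
          char delimiter = '\01';
          StringBuilder row = new StringBuilder();
          while (rs.next()) {
            row.setLength(0);
            for (int i = 1; i <= meta.getColumnCount(); i++) {
              if (i > 1) {
                row.append(delimiter);
              }
              row.append(rs.getString(i));
            }
            row.append('\n');
            out.write(row.toString().getBytes(StandardCharsets.UTF_8));
          }
        }
        conn.close();
      }
    }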

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/copy_to_table.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/copy_to_table.sql b/hplsql/src/test/queries/db/copy_to_table.sql
new file mode 100644
index 0000000..674c0fc
--- /dev/null
+++ b/hplsql/src/test/queries/db/copy_to_table.sql
@@ -0,0 +1,2 @@
+copy src to src2 at mysqlconn;
+copy (select * from src) to src2 at mysqlconn;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/part_count.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/part_count.sql b/hplsql/src/test/queries/db/part_count.sql
new file mode 100644
index 0000000..9d62c38
--- /dev/null
+++ b/hplsql/src/test/queries/db/part_count.sql
@@ -0,0 +1,17 @@
+if part_count(partition_date_1) = 5 then
+  print 'success';
+else 
+  print 'failed';
+end if;  
+
+if part_count(partition_date_1, region='1') = 2 then
+  print 'success';
+else 
+  print 'failed';
+end if;  
+
+if part_count(partition_date_1a) is null then    -- table does not exist  
+  print 'success';
+else 
+  print 'failed';
+end if;  

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/part_count_by.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/part_count_by.sql b/hplsql/src/test/queries/db/part_count_by.sql
new file mode 100644
index 0000000..599dc5b
--- /dev/null
+++ b/hplsql/src/test/queries/db/part_count_by.sql
@@ -0,0 +1,4 @@
+part_count_by(partition_date_1); 
+part_count_by(partition_date_1, dt); 
+part_count_by(partition_date_1, dt, region); 
+part_count_by(partition_date_1, region); 
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/schema.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/schema.sql b/hplsql/src/test/queries/db/schema.sql
new file mode 100644
index 0000000..0c41569
--- /dev/null
+++ b/hplsql/src/test/queries/db/schema.sql
@@ -0,0 +1,32 @@
+drop table if exists src_dt;
+
+create table src_dt (
+  c1  string,
+  c2  varchar(30),
+  c3  char(30),
+  c4  tinyint,
+  c5  smallint,
+  c6  int,
+  c7  bigint,
+  c8  decimal(19,4),
+  c9  float,
+  c10 double,
+  c11 date,
+  c12 timestamp
+);
+
+insert overwrite table src_dt
+select 
+  value c1,
+  value c2,
+  value c3,
+  cast(key as tinyint) c4,
+  cast(key as smallint) c5,
+  cast(key as int) c6,
+  cast(key as bigint) c7,
+  cast(key as decimal)/10 c8,
+  cast(key as float)/10 c9,
+  cast(key as double)/10 c10,
+  date '2015-09-07' c11,
+  cast(date '2015-09-07' as timestamp) c12
+from src;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/select_into.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/select_into.sql b/hplsql/src/test/queries/db/select_into.sql
index 3995ba2..1da610a 100644
--- a/hplsql/src/test/queries/db/select_into.sql
+++ b/hplsql/src/test/queries/db/select_into.sql
@@ -1,17 +1,33 @@
+DECLARE v_bint BIGINT;
 DECLARE v_int INT;
+DECLARE v_sint SMALLINT;
+DECLARE v_tint TINYINT;
 DECLARE v_dec DECIMAL(18,2);
 DECLARE v_dec0 DECIMAL(18,0);
+DECLARE v_str STRING;
 
 SELECT TOP 1 
+  CAST(1 AS BIGINT),
   CAST(1 AS INT), 
+  CAST(1 AS SMALLINT), 
+  CAST(1 AS TINYINT), 
   CAST(1.1 AS DECIMAL(18,2)),
   CAST(1.1 AS DECIMAL(18,0))   
 INTO 
+  v_bint,
   v_int,
+  v_sint,
+  v_tint,
   v_dec,
   v_dec0  
-FROM src ;
+FROM src;
         
+PRINT 'BIGINT: ' || v_bint;
 PRINT 'INT: ' || v_int;
+PRINT 'SMALLINT: ' || v_sint;
+PRINT 'TINYINT: ' || v_tint;
 PRINT 'DECIMAL: ' || v_dec;
-PRINT 'DECIMAL0: ' || v_dec0;
\ No newline at end of file
+PRINT 'DECIMAL0: ' || v_dec0;
+
+select 'a' into v_str from src limit 1;
+print 'string: ' || v_str;
\ No newline at end of file
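
select ... into fetches the first row of the result set and assigns each column to the corresponding declared variable; the trace in select_into.out.txt below shows the per-column type and SET for the script above. The same fetch written directly against JDBC (placeholder connection URL; this illustrates the idea, not the Select.java code path):

    import java.math.BigDecimal;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class SelectIntoSketch {
      public static void main(String[] args) throws Exception {
        Connection conn = DriverManager.getConnection("jdbc:hive2://host:10000/default"); // placeholder
        try (Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery(
                 "SELECT CAST(1 AS BIGINT), CAST(1 AS INT), CAST(1 AS SMALLINT), CAST(1 AS TINYINT), " +
                 "CAST(1.1 AS DECIMAL(18,2)), CAST(1.1 AS DECIMAL(18,0)) FROM src LIMIT 1")) {
          if (rs.next()) {
            long v_bint = rs.getLong(1);              // BIGINT    -> 1
            int v_int = rs.getInt(2);                 // INT       -> 1
            short v_sint = rs.getShort(3);            // SMALLINT  -> 1
            byte v_tint = rs.getByte(4);              // TINYINT   -> 1
            BigDecimal v_dec = rs.getBigDecimal(5);   // DECIMAL(18,2) -> 1.1
            BigDecimal v_dec0 = rs.getBigDecimal(6);  // DECIMAL(18,0) -> 1
            System.out.println("BIGINT: " + v_bint);
            System.out.println("INT: " + v_int);
            System.out.println("SMALLINT: " + v_sint);
            System.out.println("TINYINT: " + v_tint);
            System.out.println("DECIMAL: " + v_dec);
            System.out.println("DECIMAL0: " + v_dec0);
          }
        }
        conn.close();
      }
    }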

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/select_into2.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/select_into2.sql b/hplsql/src/test/queries/db/select_into2.sql
new file mode 100644
index 0000000..e0f738c
--- /dev/null
+++ b/hplsql/src/test/queries/db/select_into2.sql
@@ -0,0 +1,17 @@
+declare v_float float;
+declare v_double double;
+declare v_double2 double precision;
+
+select
+  cast(1.1 as float),
+  cast(1.1 as double),
+  cast(1.1 as double)  
+into
+  v_float,
+  v_double,
+  v_double2
+from src limit 1;
+        
+print 'float: ' || v_float;
+print 'double: ' || v_double;
+print 'double precision: ' || v_double2;

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/create_procedure2.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/create_procedure2.sql b/hplsql/src/test/queries/local/create_procedure2.sql
new file mode 100644
index 0000000..8875c6a
--- /dev/null
+++ b/hplsql/src/test/queries/local/create_procedure2.sql
@@ -0,0 +1,16 @@
+CREATE PROCEDURE set_message(IN name STRING, OUT result STRING)
+BEGIN
+  DECLARE str STRING DEFAULT 'Hello, ' || name || '!';
+  Work: begin
+    declare continue handler for sqlexception begin
+      set result = null;
+      print 'error';
+    end;
+    set result = str;
+  end;
+END;
+ 
+DECLARE str STRING;
+CALL set_message('world', str);
+PRINT str;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/if2.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/if2.sql b/hplsql/src/test/queries/local/if2.sql
new file mode 100644
index 0000000..b645b86
--- /dev/null
+++ b/hplsql/src/test/queries/local/if2.sql
@@ -0,0 +1,5 @@
+if not (coalesce(1,0) between 3 and 5) then
+  print 'correct';
+else 
+  print 'failed';
+end if;

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/include.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/include.sql b/hplsql/src/test/queries/local/include.sql
new file mode 100644
index 0000000..c1dfb96
--- /dev/null
+++ b/hplsql/src/test/queries/local/include.sql
@@ -0,0 +1,2 @@
+include src/test/queries/local/include_file.sql
+include 'src/test/queries/local/include_file' || '.sql'
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/include_file.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/include_file.sql b/hplsql/src/test/queries/local/include_file.sql
new file mode 100644
index 0000000..ac5e0f0
--- /dev/null
+++ b/hplsql/src/test/queries/local/include_file.sql
@@ -0,0 +1 @@
+print 'file included successfully';
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/mult_div.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/mult_div.sql b/hplsql/src/test/queries/local/mult_div.sql
new file mode 100644
index 0000000..ebad8f4
--- /dev/null
+++ b/hplsql/src/test/queries/local/mult_div.sql
@@ -0,0 +1,8 @@
+declare a int default 8;
+declare b int default 4;
+declare c int default 2;
+
+print a/b/c;
+
+set a = 4 * 2 / cast(4 as int) /2;
+set b = 4 * 2 /cast(4 as int)/2;
\ No newline at end of file
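
mult_div checks that multiplication and division share one precedence level and associate left to right, so with the defaults above a/b/c is (8/4)/2 = 1, and both SET expressions also reduce to 1 (see mult_div.out.txt further down). The same arithmetic in plain Java:

    public class MultDivSketch {
      public static void main(String[] args) {
        int a = 8, b = 4, c = 2;
        System.out.println(a / b / c);            // (8 / 4) / 2 = 1
        System.out.println(4 * 2 / (int) 4 / 2);  // ((4 * 2) / 4) / 2 = 1, cast mirrors cast(4 as int)
      }
    }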

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/offline/select_db2.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/offline/select_db2.sql b/hplsql/src/test/queries/offline/select_db2.sql
new file mode 100644
index 0000000..a0d2da5
--- /dev/null
+++ b/hplsql/src/test/queries/offline/select_db2.sql
@@ -0,0 +1,5 @@
+select coalesce(max(info_id)+1,0) into NextID from sproc_info with rr use and keep exclusive locks;
+
+select cd, cd + inc days, cd - inc days + coalesce(inc, 0) days
+from (select date '2015-09-02' as cd, 3 as inc from sysibm.sysdummy1);
+
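
select_db2.sql exercises the offline parser with DB2-flavored syntax: the "with rr use and keep exclusive locks" isolation clause and "date + n days" arithmetic. For reference, the date expressions in the second statement evaluate as follows (plain Java, shown only to spell out the expected values):

    import java.time.LocalDate;

    public class Db2DateArithmeticSketch {
      public static void main(String[] args) {
        LocalDate cd = LocalDate.parse("2015-09-02");
        int inc = 3;
        System.out.println(cd);                               // cd                                      -> 2015-09-02
        System.out.println(cd.plusDays(inc));                 // cd + inc days                           -> 2015-09-05
        System.out.println(cd.minusDays(inc).plusDays(inc));  // cd - inc days + coalesce(inc, 0) days   -> 2015-09-02
      }
    }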

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/cmp_row_count.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/cmp_row_count.out.txt b/hplsql/src/test/results/db/cmp_row_count.out.txt
new file mode 100644
index 0000000..16fadfd
--- /dev/null
+++ b/hplsql/src/test/results/db/cmp_row_count.out.txt
@@ -0,0 +1,12 @@
+Ln:1 CMP
+Ln:1 Query 1: SELECT COUNT(1) AS row_count FROM src
+Ln:1 Query 2: SELECT COUNT(1) AS row_count FROM src
+row_count	500	500
+Ln:2 CMP
+Ln:2 Query 1: SELECT COUNT(1) AS row_count FROM src where 1 = 1
+Ln:2 Query 2: SELECT COUNT(1) AS row_count FROM src
+row_count	500	500
+Ln:3 CMP
+Ln:3 Query 1: SELECT COUNT(1) AS row_count FROM (select 'A' from src) t
+Ln:3 Query 2: SELECT COUNT(1) AS row_count FROM src where 2 = 2
+row_count	500	500
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/cmp_sum.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/cmp_sum.out.txt b/hplsql/src/test/results/db/cmp_sum.out.txt
new file mode 100644
index 0000000..fad64b7
--- /dev/null
+++ b/hplsql/src/test/results/db/cmp_sum.out.txt
@@ -0,0 +1,320 @@
+Ln:1 CMP
+Ln:1 Query 1: SELECT COUNT(1) AS row_count,
+COUNT(C1) AS C1_COUNT_NOT_NULL,
+SUM(LENGTH(C1)) AS C1_SUM_LENGTH,
+MIN(LENGTH(C1)) AS C1_MIN_LENGTH,
+MAX(LENGTH(C1)) AS C1_MAX_LENGTH,
+COUNT(C2) AS C2_COUNT_NOT_NULL,
+SUM(LENGTH(C2)) AS C2_SUM_LENGTH,
+MIN(LENGTH(C2)) AS C2_MIN_LENGTH,
+MAX(LENGTH(C2)) AS C2_MAX_LENGTH,
+COUNT(C3) AS C3_COUNT_NOT_NULL,
+SUM(LENGTH(C3)) AS C3_SUM_LENGTH,
+MIN(LENGTH(C3)) AS C3_MIN_LENGTH,
+MAX(LENGTH(C3)) AS C3_MAX_LENGTH,
+COUNT(C4) AS C4_COUNT_NOT_NULL,
+SUM(C4) AS C4_SUM,
+MIN(C4) AS C4_MIN,
+MAX(C4) AS C4_MAX,
+COUNT(C5) AS C5_COUNT_NOT_NULL,
+SUM(C5) AS C5_SUM,
+MIN(C5) AS C5_MIN,
+MAX(C5) AS C5_MAX,
+COUNT(C6) AS C6_COUNT_NOT_NULL,
+SUM(C6) AS C6_SUM,
+MIN(C6) AS C6_MIN,
+MAX(C6) AS C6_MAX,
+COUNT(C7) AS C7_COUNT_NOT_NULL,
+SUM(C7) AS C7_SUM,
+MIN(C7) AS C7_MIN,
+MAX(C7) AS C7_MAX,
+COUNT(C8) AS C8_COUNT_NOT_NULL,
+SUM(C8) AS C8_SUM,
+MIN(C8) AS C8_MIN,
+MAX(C8) AS C8_MAX,
+COUNT(C9) AS C9_COUNT_NOT_NULL,
+SUM(C9) AS C9_SUM,
+MIN(C9) AS C9_MIN,
+MAX(C9) AS C9_MAX,
+COUNT(C10) AS C10_COUNT_NOT_NULL,
+SUM(C10) AS C10_SUM,
+MIN(C10) AS C10_MIN,
+MAX(C10) AS C10_MAX,
+COUNT(C11) AS C11_COUNT_NOT_NULL,
+SUM(YEAR(C11)) AS C11_SUM_YEAR,
+SUM(MONTH(C11)) AS C11_SUM_MONTH,
+SUM(DAY(C11)) AS C11_SUM_DAY,
+MIN(C11) AS C11_MIN,
+MAX(C11) AS C11_MAX,
+COUNT(C12) AS C12_COUNT_NOT_NULL,
+SUM(YEAR(C12)) AS C12_SUM_YEAR,
+SUM(MONTH(C12)) AS C12_SUM_MONTH,
+SUM(DAY(C12)) AS C12_SUM_DAY,
+MIN(C12) AS C12_MIN,
+MAX(C12) AS C12_MAX FROM src_dt
+Ln:1 Query 2: SELECT COUNT(1) AS row_count,
+COUNT(C1) AS C1_COUNT_NOT_NULL,
+SUM(LENGTH(C1)) AS C1_SUM_LENGTH,
+MIN(LENGTH(C1)) AS C1_MIN_LENGTH,
+MAX(LENGTH(C1)) AS C1_MAX_LENGTH,
+COUNT(C2) AS C2_COUNT_NOT_NULL,
+SUM(LENGTH(C2)) AS C2_SUM_LENGTH,
+MIN(LENGTH(C2)) AS C2_MIN_LENGTH,
+MAX(LENGTH(C2)) AS C2_MAX_LENGTH,
+COUNT(C3) AS C3_COUNT_NOT_NULL,
+SUM(LENGTH(C3)) AS C3_SUM_LENGTH,
+MIN(LENGTH(C3)) AS C3_MIN_LENGTH,
+MAX(LENGTH(C3)) AS C3_MAX_LENGTH,
+COUNT(C4) AS C4_COUNT_NOT_NULL,
+SUM(C4) AS C4_SUM,
+MIN(C4) AS C4_MIN,
+MAX(C4) AS C4_MAX,
+COUNT(C5) AS C5_COUNT_NOT_NULL,
+SUM(C5) AS C5_SUM,
+MIN(C5) AS C5_MIN,
+MAX(C5) AS C5_MAX,
+COUNT(C6) AS C6_COUNT_NOT_NULL,
+SUM(C6) AS C6_SUM,
+MIN(C6) AS C6_MIN,
+MAX(C6) AS C6_MAX,
+COUNT(C7) AS C7_COUNT_NOT_NULL,
+SUM(C7) AS C7_SUM,
+MIN(C7) AS C7_MIN,
+MAX(C7) AS C7_MAX,
+COUNT(C8) AS C8_COUNT_NOT_NULL,
+SUM(C8) AS C8_SUM,
+MIN(C8) AS C8_MIN,
+MAX(C8) AS C8_MAX,
+COUNT(C9) AS C9_COUNT_NOT_NULL,
+SUM(C9) AS C9_SUM,
+MIN(C9) AS C9_MIN,
+MAX(C9) AS C9_MAX,
+COUNT(C10) AS C10_COUNT_NOT_NULL,
+SUM(C10) AS C10_SUM,
+MIN(C10) AS C10_MIN,
+MAX(C10) AS C10_MAX,
+COUNT(C11) AS C11_COUNT_NOT_NULL,
+SUM(YEAR(C11)) AS C11_SUM_YEAR,
+SUM(MONTH(C11)) AS C11_SUM_MONTH,
+SUM(DAY(C11)) AS C11_SUM_DAY,
+MIN(C11) AS C11_MIN,
+MAX(C11) AS C11_MAX,
+COUNT(C12) AS C12_COUNT_NOT_NULL,
+SUM(YEAR(C12)) AS C12_SUM_YEAR,
+SUM(MONTH(C12)) AS C12_SUM_MONTH,
+SUM(DAY(C12)) AS C12_SUM_DAY,
+MIN(C12) AS C12_MIN,
+MAX(C12) AS C12_MAX FROM src_dt
+row_count	500	500
+c1_count_not_null	500	500
+c1_sum_length	3406	3406
+c1_min_length	5	5
+c1_max_length	7	7
+c2_count_not_null	500	500
+c2_sum_length	3406	3406
+c2_min_length	5	5
+c2_max_length	7	7
+c3_count_not_null	500	500
+c3_sum_length	3406	3406
+c3_min_length	5	5
+c3_max_length	7	7
+c4_count_not_null	106	106
+c4_sum	6697	6697
+c4_min	0	0
+c4_max	126	126
+c5_count_not_null	500	500
+c5_sum	130091	130091
+c5_min	0	0
+c5_max	498	498
+c6_count_not_null	500	500
+c6_sum	130091	130091
+c6_min	0	0
+c6_max	498	498
+c7_count_not_null	500	500
+c7_sum	130091	130091
+c7_min	0	0
+c7_max	498	498
+c8_count_not_null	500	500
+c8_sum	13009.1	13009.1
+c8_min	0	0
+c8_max	49.8	49.8
+c9_count_not_null	500	500
+c9_sum	13009.10001783073	13009.10001783073
+c9_min	0.0	0.0
+c9_max	49.79999923706055	49.79999923706055
+c10_count_not_null	500	500
+c10_sum	13009.09999999999	13009.09999999999
+c10_min	0.0	0.0
+c10_max	49.8	49.8
+c11_count_not_null	500	500
+c11_sum_year	1007500	1007500
+c11_sum_month	4500	4500
+c11_sum_day	3500	3500
+c11_min	null	null
+c11_max	null	null
+c12_count_not_null	500	500
+c12_sum_year	1007500	1007500
+c12_sum_month	4500	4500
+c12_sum_day	3500	3500
+c12_min	null	null
+c12_max	null	null
+Ln:2 CMP
+Ln:2 Query 1: SELECT COUNT(1) AS row_count,
+COUNT(C1) AS C1_COUNT_NOT_NULL,
+SUM(LENGTH(C1)) AS C1_SUM_LENGTH,
+MIN(LENGTH(C1)) AS C1_MIN_LENGTH,
+MAX(LENGTH(C1)) AS C1_MAX_LENGTH,
+COUNT(C2) AS C2_COUNT_NOT_NULL,
+SUM(LENGTH(C2)) AS C2_SUM_LENGTH,
+MIN(LENGTH(C2)) AS C2_MIN_LENGTH,
+MAX(LENGTH(C2)) AS C2_MAX_LENGTH,
+COUNT(C3) AS C3_COUNT_NOT_NULL,
+SUM(LENGTH(C3)) AS C3_SUM_LENGTH,
+MIN(LENGTH(C3)) AS C3_MIN_LENGTH,
+MAX(LENGTH(C3)) AS C3_MAX_LENGTH,
+COUNT(C4) AS C4_COUNT_NOT_NULL,
+SUM(C4) AS C4_SUM,
+MIN(C4) AS C4_MIN,
+MAX(C4) AS C4_MAX,
+COUNT(C5) AS C5_COUNT_NOT_NULL,
+SUM(C5) AS C5_SUM,
+MIN(C5) AS C5_MIN,
+MAX(C5) AS C5_MAX,
+COUNT(C6) AS C6_COUNT_NOT_NULL,
+SUM(C6) AS C6_SUM,
+MIN(C6) AS C6_MIN,
+MAX(C6) AS C6_MAX,
+COUNT(C7) AS C7_COUNT_NOT_NULL,
+SUM(C7) AS C7_SUM,
+MIN(C7) AS C7_MIN,
+MAX(C7) AS C7_MAX,
+COUNT(C8) AS C8_COUNT_NOT_NULL,
+SUM(C8) AS C8_SUM,
+MIN(C8) AS C8_MIN,
+MAX(C8) AS C8_MAX,
+COUNT(C9) AS C9_COUNT_NOT_NULL,
+SUM(C9) AS C9_SUM,
+MIN(C9) AS C9_MIN,
+MAX(C9) AS C9_MAX,
+COUNT(C10) AS C10_COUNT_NOT_NULL,
+SUM(C10) AS C10_SUM,
+MIN(C10) AS C10_MIN,
+MAX(C10) AS C10_MAX,
+COUNT(C11) AS C11_COUNT_NOT_NULL,
+SUM(YEAR(C11)) AS C11_SUM_YEAR,
+SUM(MONTH(C11)) AS C11_SUM_MONTH,
+SUM(DAY(C11)) AS C11_SUM_DAY,
+MIN(C11) AS C11_MIN,
+MAX(C11) AS C11_MAX,
+COUNT(C12) AS C12_COUNT_NOT_NULL,
+SUM(YEAR(C12)) AS C12_SUM_YEAR,
+SUM(MONTH(C12)) AS C12_SUM_MONTH,
+SUM(DAY(C12)) AS C12_SUM_DAY,
+MIN(C12) AS C12_MIN,
+MAX(C12) AS C12_MAX FROM src_dt where 1 = 1
+Ln:2 Query 2: SELECT COUNT(1) AS row_count,
+COUNT(C1) AS C1_COUNT_NOT_NULL,
+SUM(LENGTH(C1)) AS C1_SUM_LENGTH,
+MIN(LENGTH(C1)) AS C1_MIN_LENGTH,
+MAX(LENGTH(C1)) AS C1_MAX_LENGTH,
+COUNT(C2) AS C2_COUNT_NOT_NULL,
+SUM(LENGTH(C2)) AS C2_SUM_LENGTH,
+MIN(LENGTH(C2)) AS C2_MIN_LENGTH,
+MAX(LENGTH(C2)) AS C2_MAX_LENGTH,
+COUNT(C3) AS C3_COUNT_NOT_NULL,
+SUM(LENGTH(C3)) AS C3_SUM_LENGTH,
+MIN(LENGTH(C3)) AS C3_MIN_LENGTH,
+MAX(LENGTH(C3)) AS C3_MAX_LENGTH,
+COUNT(C4) AS C4_COUNT_NOT_NULL,
+SUM(C4) AS C4_SUM,
+MIN(C4) AS C4_MIN,
+MAX(C4) AS C4_MAX,
+COUNT(C5) AS C5_COUNT_NOT_NULL,
+SUM(C5) AS C5_SUM,
+MIN(C5) AS C5_MIN,
+MAX(C5) AS C5_MAX,
+COUNT(C6) AS C6_COUNT_NOT_NULL,
+SUM(C6) AS C6_SUM,
+MIN(C6) AS C6_MIN,
+MAX(C6) AS C6_MAX,
+COUNT(C7) AS C7_COUNT_NOT_NULL,
+SUM(C7) AS C7_SUM,
+MIN(C7) AS C7_MIN,
+MAX(C7) AS C7_MAX,
+COUNT(C8) AS C8_COUNT_NOT_NULL,
+SUM(C8) AS C8_SUM,
+MIN(C8) AS C8_MIN,
+MAX(C8) AS C8_MAX,
+COUNT(C9) AS C9_COUNT_NOT_NULL,
+SUM(C9) AS C9_SUM,
+MIN(C9) AS C9_MIN,
+MAX(C9) AS C9_MAX,
+COUNT(C10) AS C10_COUNT_NOT_NULL,
+SUM(C10) AS C10_SUM,
+MIN(C10) AS C10_MIN,
+MAX(C10) AS C10_MAX,
+COUNT(C11) AS C11_COUNT_NOT_NULL,
+SUM(YEAR(C11)) AS C11_SUM_YEAR,
+SUM(MONTH(C11)) AS C11_SUM_MONTH,
+SUM(DAY(C11)) AS C11_SUM_DAY,
+MIN(C11) AS C11_MIN,
+MAX(C11) AS C11_MAX,
+COUNT(C12) AS C12_COUNT_NOT_NULL,
+SUM(YEAR(C12)) AS C12_SUM_YEAR,
+SUM(MONTH(C12)) AS C12_SUM_MONTH,
+SUM(DAY(C12)) AS C12_SUM_DAY,
+MIN(C12) AS C12_MIN,
+MAX(C12) AS C12_MAX FROM src_dt
+row_count	500	500
+c1_count_not_null	500	500
+c1_sum_length	3406	3406
+c1_min_length	5	5
+c1_max_length	7	7
+c2_count_not_null	500	500
+c2_sum_length	3406	3406
+c2_min_length	5	5
+c2_max_length	7	7
+c3_count_not_null	500	500
+c3_sum_length	3406	3406
+c3_min_length	5	5
+c3_max_length	7	7
+c4_count_not_null	106	106
+c4_sum	6697	6697
+c4_min	0	0
+c4_max	126	126
+c5_count_not_null	500	500
+c5_sum	130091	130091
+c5_min	0	0
+c5_max	498	498
+c6_count_not_null	500	500
+c6_sum	130091	130091
+c6_min	0	0
+c6_max	498	498
+c7_count_not_null	500	500
+c7_sum	130091	130091
+c7_min	0	0
+c7_max	498	498
+c8_count_not_null	500	500
+c8_sum	13009.1	13009.1
+c8_min	0	0
+c8_max	49.8	49.8
+c9_count_not_null	500	500
+c9_sum	13009.10001783073	13009.10001783073
+c9_min	0.0	0.0
+c9_max	49.79999923706055	49.79999923706055
+c10_count_not_null	500	500
+c10_sum	13009.09999999999	13009.09999999999
+c10_min	0.0	0.0
+c10_max	49.8	49.8
+c11_count_not_null	500	500
+c11_sum_year	1007500	1007500
+c11_sum_month	4500	4500
+c11_sum_day	3500	3500
+c11_min	null	null
+c11_max	null	null
+c12_count_not_null	500	500
+c12_sum_year	1007500	1007500
+c12_sum_month	4500	4500
+c12_sum_day	3500	3500
+c12_min	null	null
+c12_max	null	null
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/copy_to_file.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/copy_to_file.out.txt b/hplsql/src/test/results/db/copy_to_file.out.txt
new file mode 100644
index 0000000..e571d36
--- /dev/null
+++ b/hplsql/src/test/results/db/copy_to_file.out.txt
@@ -0,0 +1,6 @@
+Ln:1 COPY
+Ln:1 Query executed: 2 columns, output file: target/tmp/src.txt
+Ln:2 COPY
+Ln:2 Statement:
+select * from src
+Ln:2 Query executed: 2 columns, output file: target/tmp/src2.txt
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/copy_to_hdfs.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/copy_to_hdfs.out.txt b/hplsql/src/test/results/db/copy_to_hdfs.out.txt
new file mode 100644
index 0000000..23c0cb2
--- /dev/null
+++ b/hplsql/src/test/results/db/copy_to_hdfs.out.txt
@@ -0,0 +1,4 @@
+Ln:2 COPY
+Ln:2 Statement:
+select * from src
+Ln:2 Query executed: 2 columns, output file: /user/hplsql/src2.txt
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/copy_to_table.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/copy_to_table.out.txt b/hplsql/src/test/results/db/copy_to_table.out.txt
new file mode 100644
index 0000000..411b425
--- /dev/null
+++ b/hplsql/src/test/results/db/copy_to_table.out.txt
@@ -0,0 +1,2 @@
+Ln:1 COPY
+Ln:1 SELECT executed: 2 columns
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/part_count.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/part_count.out.txt b/hplsql/src/test/results/db/part_count.out.txt
new file mode 100644
index 0000000..485ffe1
--- /dev/null
+++ b/hplsql/src/test/results/db/part_count.out.txt
@@ -0,0 +1,15 @@
+Ln:1 IF
+Ln:1 Query: SHOW PARTITIONS partition_date_1
+Ln:1 IF TRUE executed
+Ln:2 PRINT
+success
+Ln:7 IF
+Ln:7 Query: SHOW PARTITIONS partition_date_1 PARTITION (region='1')
+Ln:7 IF TRUE executed
+Ln:8 PRINT
+success
+Ln:13 IF
+Ln:13 Query: SHOW PARTITIONS partition_date_1a
+Ln:13 IF TRUE executed
+Ln:14 PRINT
+success
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/part_count_by.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/part_count_by.out.txt b/hplsql/src/test/results/db/part_count_by.out.txt
new file mode 100644
index 0000000..61f51cd
--- /dev/null
+++ b/hplsql/src/test/results/db/part_count_by.out.txt
@@ -0,0 +1,13 @@
+3
+dt=2000-01-01	2
+dt=2013-12-10	1
+dt=2013-08-08	2
+dt=2013-08-08/region=1	1
+dt=2000-01-01/region=1	1
+dt=2013-12-10/region=2020-20-20	1
+dt=2000-01-01/region=2	1
+dt=2013-08-08/region=10	1
+region=10	1
+region=2020-20-20	1
+region=2	1
+region=1	2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/select_into.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/select_into.out.txt b/hplsql/src/test/results/db/select_into.out.txt
index 80d067e..3f4ae31 100644
--- a/hplsql/src/test/results/db/select_into.out.txt
+++ b/hplsql/src/test/results/db/select_into.out.txt
@@ -1,19 +1,43 @@
-Ln:1 DECLARE v_int INT
-Ln:2 DECLARE v_dec DECIMAL
-Ln:3 DECLARE v_dec0 DECIMAL
-Ln:5 SELECT
-Ln:5 SELECT CAST(1 AS INT), CAST(1.1 AS DECIMAL(18,2)), CAST(1.1 AS DECIMAL(18,0)) FROM src LIMIT 1
-Ln:5 SELECT completed successfully
-Ln:5 SELECT INTO statement executed
-Ln:5 COLUMN: _c0, int
-Ln:5 SET v_int = 1
-Ln:5 COLUMN: _c1, decimal
-Ln:5 SET v_dec = 1.1
-Ln:5 COLUMN: _c2, decimal
-Ln:5 SET v_dec0 = 1
-Ln:15 PRINT
+Ln:1 DECLARE v_bint BIGINT
+Ln:2 DECLARE v_int INT
+Ln:3 DECLARE v_sint SMALLINT
+Ln:4 DECLARE v_tint TINYINT
+Ln:5 DECLARE v_dec DECIMAL
+Ln:6 DECLARE v_dec0 DECIMAL
+Ln:7 DECLARE v_str STRING
+Ln:9 SELECT
+Ln:9 SELECT CAST(1 AS BIGINT), CAST(1 AS INT), CAST(1 AS SMALLINT), CAST(1 AS TINYINT), CAST(1.1 AS DECIMAL(18,2)), CAST(1.1 AS DECIMAL(18,0)) FROM src LIMIT 1
+Ln:9 SELECT completed successfully
+Ln:9 SELECT INTO statement executed
+Ln:9 COLUMN: _c0, bigint
+Ln:9 SET v_bint = 1
+Ln:9 COLUMN: _c1, int
+Ln:9 SET v_int = 1
+Ln:9 COLUMN: _c2, smallint
+Ln:9 SET v_sint = 1
+Ln:9 COLUMN: _c3, tinyint
+Ln:9 SET v_tint = 1
+Ln:9 COLUMN: _c4, decimal
+Ln:9 SET v_dec = 1.1
+Ln:9 COLUMN: _c5, decimal
+Ln:9 SET v_dec0 = 1
+Ln:25 PRINT
+BIGINT: 1
+Ln:26 PRINT
 INT: 1
-Ln:16 PRINT
+Ln:27 PRINT
+SMALLINT: 1
+Ln:28 PRINT
+TINYINT: 1
+Ln:29 PRINT
 DECIMAL: 1.1
-Ln:17 PRINT
-DECIMAL0: 1
\ No newline at end of file
+Ln:30 PRINT
+DECIMAL0: 1
+Ln:32 SELECT
+Ln:32 select 'a' from src LIMIT 1
+Ln:32 SELECT completed successfully
+Ln:32 SELECT INTO statement executed
+Ln:32 COLUMN: _c0, string
+Ln:32 SET v_str = a
+Ln:33 PRINT
+string: a
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/select_into2.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/select_into2.out.txt b/hplsql/src/test/results/db/select_into2.out.txt
new file mode 100644
index 0000000..03e67ad
--- /dev/null
+++ b/hplsql/src/test/results/db/select_into2.out.txt
@@ -0,0 +1,19 @@
+Ln:1 DECLARE v_float float
+Ln:2 DECLARE v_double double
+Ln:3 DECLARE v_double2 double precision
+Ln:5 SELECT
+Ln:5 select cast(1.1 as float), cast(1.1 as double), cast(1.1 as double) from src LIMIT 1
+Ln:5 SELECT completed successfully
+Ln:5 SELECT INTO statement executed
+Ln:5 COLUMN: _c0, float
+Ln:5 SET v_float = 1.100000023841858
+Ln:5 COLUMN: _c1, double
+Ln:5 SET v_double = 1.1
+Ln:5 COLUMN: _c2, double
+Ln:5 SET v_double2 = 1.1
+Ln:15 PRINT
+float: 1.100000023841858
+Ln:16 PRINT
+double: 1.1
+Ln:17 PRINT
+double precision: 1.1
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/local/create_procedure2.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/local/create_procedure2.out.txt b/hplsql/src/test/results/local/create_procedure2.out.txt
new file mode 100644
index 0000000..765faa9
--- /dev/null
+++ b/hplsql/src/test/results/local/create_procedure2.out.txt
@@ -0,0 +1,10 @@
+Ln:1 CREATE PROCEDURE set_message
+Ln:13 DECLARE str STRING
+Ln:14 EXEC PROCEDURE set_message
+Ln:14 SET PARAM name = world
+Ln:14 SET PARAM result = null
+Ln:3 DECLARE str STRING = 'Hello, world!'
+Ln:5 DECLARE HANDLER
+Ln:9 SET result = 'Hello, world!'
+Ln:15 PRINT
+Hello, world!
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/local/if2.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/local/if2.out.txt b/hplsql/src/test/results/local/if2.out.txt
new file mode 100644
index 0000000..63a6213
--- /dev/null
+++ b/hplsql/src/test/results/local/if2.out.txt
@@ -0,0 +1,4 @@
+Ln:1 IF
+Ln:1 IF TRUE executed
+Ln:2 PRINT
+correct
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/local/include.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/local/include.out.txt b/hplsql/src/test/results/local/include.out.txt
new file mode 100644
index 0000000..86cfa05
--- /dev/null
+++ b/hplsql/src/test/results/local/include.out.txt
@@ -0,0 +1,8 @@
+Ln:1 INCLUDE src/test/queries/local/include_file.sql
+INLCUDE CONTENT src/test/queries/local/include_file.sql (non-empty)
+Ln:1 PRINT
+file included successfully
+Ln:2 INCLUDE src/test/queries/local/include_file.sql
+INLCUDE CONTENT src/test/queries/local/include_file.sql (non-empty)
+Ln:1 PRINT
+file included successfully
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/local/mult_div.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/local/mult_div.out.txt b/hplsql/src/test/results/local/mult_div.out.txt
new file mode 100644
index 0000000..cd17c16
--- /dev/null
+++ b/hplsql/src/test/results/local/mult_div.out.txt
@@ -0,0 +1,7 @@
+Ln:1 DECLARE a int = 8
+Ln:2 DECLARE b int = 4
+Ln:3 DECLARE c int = 2
+Ln:5 PRINT
+1
+Ln:7 SET a = 1
+Ln:8 SET b = 1
\ No newline at end of file