Posted to commits@hive.apache.org by dm...@apache.org on 2015/09/22 15:42:51 UTC

[1/3] hive git commit: HIVE-11783: Extending HPL/SQL parser (Dmitry Tolpeko reviewed by Alan Gates)

Repository: hive
Updated Branches:
  refs/heads/master 523830338 -> 451381cfb


http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/offline/select_db2.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/offline/select_db2.out.txt b/hplsql/src/test/results/offline/select_db2.out.txt
new file mode 100644
index 0000000..1d64e8a
--- /dev/null
+++ b/hplsql/src/test/results/offline/select_db2.out.txt
@@ -0,0 +1,6 @@
+Ln:1 SELECT
+Ln:1 select coalesce(max(info_id) + 1, 0) from sproc_info
+Ln:1 Not executed - offline mode set
+Ln:3 SELECT
+Ln:3 select cd, cd + inc days, cd - inc days + coalesce(inc, 0) days from (select date '2015-09-02' as cd, 3 as inc from sysibm.sysdummy1)
+Ln:3 Not executed - offline mode set
\ No newline at end of file


[3/3] hive git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/hive

Posted by dm...@apache.org.
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/hive


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/451381cf
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/451381cf
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/451381cf

Branch: refs/heads/master
Commit: 451381cfb28d1b9ff2775d55d92924956b3a5cdb
Parents: 0679078 5238303
Author: Dmitry Tolpeko <dm...@gmail.com>
Authored: Tue Sep 22 06:41:46 2015 -0700
Committer: Dmitry Tolpeko <dm...@gmail.com>
Committed: Tue Sep 22 06:41:46 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ant/QTestGenTask.java    |    11 +
 beeline/src/main/resources/beeline-log4j2.xml   |     5 +-
 bin/beeline                                     |     5 +
 bin/ext/hbaseimport.cmd                         |    35 +
 bin/ext/hbaseimport.sh                          |    27 +
 bin/ext/hbaseschematool.sh                      |    27 +
 .../apache/hadoop/hive/common/ObjectPair.java   |     5 +
 .../hadoop/hive/common/jsonexplain/tez/Op.java  |     8 +-
 .../hive/common/jsonexplain/tez/Stage.java      |    14 +-
 .../common/jsonexplain/tez/TezJsonParser.java   |    17 +-
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   111 +-
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |   196 +
 .../apache/hive/common/util/BloomFilter.java    |    20 +-
 common/src/main/resources/hive-log4j2.xml       |     5 +-
 .../test/resources/hive-exec-log4j2-test.xml    |     5 +-
 common/src/test/resources/hive-log4j2-test.xml  |     5 +-
 data/conf/hive-log4j2.xml                       |     5 +-
 data/conf/tez/hive-site.xml                     |    10 +
 data/files/dynpartdata1.txt                     |     5 +
 data/files/dynpartdata2.txt                     |     6 +
 .../HiveHBaseTableSnapshotInputFormat.java      |    21 +-
 .../queries/positive/hbase_handler_snapshot.q   |     4 +
 .../positive/hbase_handler_snapshot.q.out       |    22 +
 .../deployers/config/hive/hive-log4j2.xml       |     5 +-
 .../svr/src/main/config/webhcat-log4j2.xml      |     5 +-
 .../benchmark/serde/LazySimpleSerDeBench.java   |   453 +
 .../hive/thrift/TestHadoop20SAuthBridge.java    |   420 -
 .../hive/thrift/TestHadoopAuthBridge23.java     |   423 +
 itests/hive-unit/pom.xml                        |    35 +
 .../hadoop/hive/metastore/TestAdminUser.java    |     4 +-
 .../hive/metastore/TestHiveMetaStore.java       |     3 +
 .../metastore/hbase/HBaseIntegrationTests.java  |   117 +
 .../TestHBaseAggrStatsCacheIntegration.java     |   691 +
 .../hive/metastore/hbase/TestHBaseImport.java   |   650 +
 .../metastore/hbase/TestHBaseMetastoreSql.java  |   223 +
 .../hbase/TestHBaseStoreIntegration.java        |  1794 +
 .../hbase/TestStorageDescriptorSharing.java     |   191 +
 .../hive/ql/security/FolderPermissionBase.java  |    17 +-
 itests/qtest/pom.xml                            |    10 +-
 .../test/resources/testconfiguration.properties |     3 +
 itests/util/pom.xml                             |    32 +
 .../metastore/hbase/HBaseStoreTestUtil.java     |    45 +
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |    50 +-
 .../hive/jdbc/ZooKeeperHiveClientHelper.java    |    32 +-
 metastore/if/hive_metastore.thrift              |    54 +
 metastore/pom.xml                               |    82 +
 .../metastore/hbase/HbaseMetastoreProto.java    | 34901 +++++++++++++++++
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  |  6919 ++--
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h    |   664 +
 .../ThriftHiveMetastore_server.skeleton.cpp     |    25 +
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  1294 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |   371 +
 .../hive/metastore/api/AbortTxnRequest.java     |     2 +-
 .../metastore/api/AddDynamicPartitions.java     |     2 +-
 .../metastore/api/AddPartitionsRequest.java     |     2 +-
 .../hive/metastore/api/AddPartitionsResult.java |     2 +-
 .../hadoop/hive/metastore/api/AggrStats.java    |     2 +-
 .../metastore/api/AlreadyExistsException.java   |     2 +-
 .../metastore/api/BinaryColumnStatsData.java    |     2 +-
 .../metastore/api/BooleanColumnStatsData.java   |     2 +-
 .../hive/metastore/api/CheckLockRequest.java    |     2 +-
 .../metastore/api/ClearFileMetadataRequest.java |   438 +
 .../metastore/api/ClearFileMetadataResult.java  |   283 +
 .../hive/metastore/api/ColumnStatistics.java    |     2 +-
 .../metastore/api/ColumnStatisticsDesc.java     |     2 +-
 .../hive/metastore/api/ColumnStatisticsObj.java |     2 +-
 .../hive/metastore/api/CommitTxnRequest.java    |     2 +-
 .../hive/metastore/api/CompactionRequest.java   |     2 +-
 .../api/ConfigValSecurityException.java         |     2 +-
 .../api/CurrentNotificationEventId.java         |     2 +-
 .../hadoop/hive/metastore/api/Database.java     |     2 +-
 .../apache/hadoop/hive/metastore/api/Date.java  |     2 +-
 .../hive/metastore/api/DateColumnStatsData.java |     2 +-
 .../hadoop/hive/metastore/api/Decimal.java      |     2 +-
 .../metastore/api/DecimalColumnStatsData.java   |     2 +-
 .../metastore/api/DoubleColumnStatsData.java    |     2 +-
 .../hive/metastore/api/DropPartitionsExpr.java  |     2 +-
 .../metastore/api/DropPartitionsRequest.java    |     2 +-
 .../metastore/api/DropPartitionsResult.java     |     2 +-
 .../hive/metastore/api/EnvironmentContext.java  |     2 +-
 .../hadoop/hive/metastore/api/FieldSchema.java  |     2 +-
 .../hive/metastore/api/FireEventRequest.java    |     2 +-
 .../hive/metastore/api/FireEventResponse.java   |     2 +-
 .../hadoop/hive/metastore/api/Function.java     |     2 +-
 .../metastore/api/GetAllFunctionsResponse.java  |    38 +-
 .../api/GetFileMetadataByExprRequest.java       |   548 +
 .../api/GetFileMetadataByExprResult.java        |   703 +
 .../metastore/api/GetFileMetadataRequest.java   |   438 +
 .../metastore/api/GetFileMetadataResult.java    |   540 +
 .../metastore/api/GetOpenTxnsInfoResponse.java  |     2 +-
 .../hive/metastore/api/GetOpenTxnsResponse.java |     2 +-
 .../api/GetPrincipalsInRoleRequest.java         |     2 +-
 .../api/GetPrincipalsInRoleResponse.java        |     2 +-
 .../api/GetRoleGrantsForPrincipalRequest.java   |     2 +-
 .../api/GetRoleGrantsForPrincipalResponse.java  |     2 +-
 .../api/GrantRevokePrivilegeRequest.java        |     2 +-
 .../api/GrantRevokePrivilegeResponse.java       |     2 +-
 .../metastore/api/GrantRevokeRoleRequest.java   |     2 +-
 .../metastore/api/GrantRevokeRoleResponse.java  |     2 +-
 .../hive/metastore/api/HeartbeatRequest.java    |     2 +-
 .../metastore/api/HeartbeatTxnRangeRequest.java |     2 +-
 .../api/HeartbeatTxnRangeResponse.java          |     2 +-
 .../hive/metastore/api/HiveObjectPrivilege.java |     2 +-
 .../hive/metastore/api/HiveObjectRef.java       |     2 +-
 .../apache/hadoop/hive/metastore/api/Index.java |     2 +-
 .../api/IndexAlreadyExistsException.java        |     2 +-
 .../metastore/api/InsertEventRequestData.java   |     2 +-
 .../metastore/api/InvalidInputException.java    |     2 +-
 .../metastore/api/InvalidObjectException.java   |     2 +-
 .../api/InvalidOperationException.java          |     2 +-
 .../api/InvalidPartitionException.java          |     2 +-
 .../hive/metastore/api/LockComponent.java       |     2 +-
 .../hadoop/hive/metastore/api/LockRequest.java  |     2 +-
 .../hadoop/hive/metastore/api/LockResponse.java |     2 +-
 .../hive/metastore/api/LongColumnStatsData.java |     2 +-
 .../hive/metastore/api/MetaException.java       |     2 +-
 .../hive/metastore/api/MetadataPpdResult.java   |   508 +
 .../hive/metastore/api/NoSuchLockException.java |     2 +-
 .../metastore/api/NoSuchObjectException.java    |     2 +-
 .../hive/metastore/api/NoSuchTxnException.java  |     2 +-
 .../hive/metastore/api/NotificationEvent.java   |     2 +-
 .../metastore/api/NotificationEventRequest.java |     2 +-
 .../api/NotificationEventResponse.java          |     2 +-
 .../hive/metastore/api/OpenTxnRequest.java      |     2 +-
 .../hive/metastore/api/OpenTxnsResponse.java    |     2 +-
 .../apache/hadoop/hive/metastore/api/Order.java |     2 +-
 .../hadoop/hive/metastore/api/Partition.java    |     2 +-
 .../api/PartitionListComposingSpec.java         |     2 +-
 .../hive/metastore/api/PartitionSpec.java       |     2 +-
 .../api/PartitionSpecWithSharedSD.java          |     2 +-
 .../hive/metastore/api/PartitionWithoutSD.java  |     2 +-
 .../metastore/api/PartitionsByExprRequest.java  |     2 +-
 .../metastore/api/PartitionsByExprResult.java   |     2 +-
 .../metastore/api/PartitionsStatsRequest.java   |     2 +-
 .../metastore/api/PartitionsStatsResult.java    |     2 +-
 .../metastore/api/PrincipalPrivilegeSet.java    |     2 +-
 .../hadoop/hive/metastore/api/PrivilegeBag.java |     2 +-
 .../hive/metastore/api/PrivilegeGrantInfo.java  |     2 +-
 .../metastore/api/PutFileMetadataRequest.java   |   588 +
 .../metastore/api/PutFileMetadataResult.java    |   283 +
 .../hadoop/hive/metastore/api/ResourceUri.java  |     2 +-
 .../apache/hadoop/hive/metastore/api/Role.java  |     2 +-
 .../hive/metastore/api/RolePrincipalGrant.java  |     2 +-
 .../hadoop/hive/metastore/api/Schema.java       |     2 +-
 .../hadoop/hive/metastore/api/SerDeInfo.java    |     2 +-
 .../api/SetPartitionsStatsRequest.java          |     2 +-
 .../hive/metastore/api/ShowCompactRequest.java  |     2 +-
 .../hive/metastore/api/ShowCompactResponse.java |     2 +-
 .../api/ShowCompactResponseElement.java         |     2 +-
 .../hive/metastore/api/ShowLocksRequest.java    |     2 +-
 .../hive/metastore/api/ShowLocksResponse.java   |     2 +-
 .../metastore/api/ShowLocksResponseElement.java |     2 +-
 .../hadoop/hive/metastore/api/SkewedInfo.java   |     2 +-
 .../hive/metastore/api/StorageDescriptor.java   |     2 +-
 .../metastore/api/StringColumnStatsData.java    |     2 +-
 .../apache/hadoop/hive/metastore/api/Table.java |     2 +-
 .../hive/metastore/api/TableStatsRequest.java   |     2 +-
 .../hive/metastore/api/TableStatsResult.java    |     2 +-
 .../hive/metastore/api/ThriftHiveMetastore.java |  8422 ++--
 .../hive/metastore/api/TxnAbortedException.java |     2 +-
 .../hadoop/hive/metastore/api/TxnInfo.java      |     2 +-
 .../hive/metastore/api/TxnOpenException.java    |     2 +-
 .../apache/hadoop/hive/metastore/api/Type.java  |     2 +-
 .../hive/metastore/api/UnknownDBException.java  |     2 +-
 .../api/UnknownPartitionException.java          |     2 +-
 .../metastore/api/UnknownTableException.java    |     2 +-
 .../hive/metastore/api/UnlockRequest.java       |     2 +-
 .../hadoop/hive/metastore/api/Version.java      |     2 +-
 .../gen-php/metastore/ThriftHiveMetastore.php   |  2810 +-
 .../src/gen/thrift/gen-php/metastore/Types.php  |  1009 +-
 .../hive_metastore/ThriftHiveMetastore-remote   |    49 +-
 .../hive_metastore/ThriftHiveMetastore.py       |  1563 +-
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |   734 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |   167 +
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |   267 +
 .../hadoop/hive/metastore/HiveAlterHandler.java |    38 +-
 .../hadoop/hive/metastore/HiveMetaStore.java    |   272 +-
 .../hive/metastore/HiveMetaStoreClient.java     |   112 +-
 .../hadoop/hive/metastore/IMetaStoreClient.java |     6 +
 .../hadoop/hive/metastore/ObjectStore.java      |   437 +-
 .../hive/metastore/PartFilterExprUtil.java      |   149 +
 .../apache/hadoop/hive/metastore/RawStore.java  |    66 +-
 .../hadoop/hive/metastore/RawStoreProxy.java    |     5 +-
 .../hive/metastore/RetryingHMSHandler.java      |    33 +-
 .../hbase/AggrStatsInvalidatorFilter.java       |   121 +
 .../hadoop/hive/metastore/hbase/Counter.java    |    53 +
 .../hive/metastore/hbase/HBaseConnection.java   |    96 +
 .../metastore/hbase/HBaseFilterPlanUtil.java    |   612 +
 .../hive/metastore/hbase/HBaseImport.java       |   535 +
 .../hive/metastore/hbase/HBaseReadWrite.java    |  2106 +
 .../hive/metastore/hbase/HBaseSchemaTool.java   |   239 +
 .../hadoop/hive/metastore/hbase/HBaseStore.java |  2387 ++
 .../hadoop/hive/metastore/hbase/HBaseUtils.java |  1340 +
 .../hive/metastore/hbase/ObjectCache.java       |    81 +
 .../hive/metastore/hbase/PartitionCache.java    |   168 +
 .../metastore/hbase/PartitionKeyComparator.java |   292 +
 .../hbase/SharedStorageDescriptor.java          |   251 +
 .../hadoop/hive/metastore/hbase/StatsCache.java |   326 +
 .../metastore/hbase/TephraHBaseConnection.java  |   127 +
 .../metastore/hbase/VanillaHBaseConnection.java |   137 +
 .../stats/BinaryColumnStatsAggregator.java      |    35 +
 .../stats/BooleanColumnStatsAggregator.java     |    35 +
 .../hbase/stats/ColumnStatsAggregator.java      |    26 +
 .../stats/ColumnStatsAggregatorFactory.java     |    94 +
 .../stats/DecimalColumnStatsAggregator.java     |    43 +
 .../stats/DoubleColumnStatsAggregator.java      |    36 +
 .../hbase/stats/LongColumnStatsAggregator.java  |    36 +
 .../stats/StringColumnStatsAggregator.java      |    36 +
 .../hive/metastore/parser/ExpressionTree.java   |     9 +-
 .../hive/metastore/tools/HiveMetaTool.java      |     5 +
 .../hadoop/hive/metastore/txn/TxnHandler.java   |    61 +-
 .../metastore/hbase/hbase_metastore_proto.proto |   282 +
 .../DummyRawStoreControlledCommit.java          |    56 +-
 .../DummyRawStoreForJdoConnection.java          |    50 +-
 .../hadoop/hive/metastore/TestObjectStore.java  |    43 +-
 .../hadoop/hive/metastore/hbase/MockUtils.java  |   199 +
 .../hbase/TestHBaseAggregateStatsCache.java     |   316 +
 .../hbase/TestHBaseFilterPlanUtil.java          |   483 +
 .../hive/metastore/hbase/TestHBaseStore.java    |  1307 +
 .../metastore/hbase/TestHBaseStoreCached.java   |   378 +
 .../hbase/TestSharedStorageDescriptor.java      |   153 +
 pom.xml                                         |    25 +-
 .../hadoop/hive/ql/plan/api/Adjacency.java      |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Graph.java   |     2 +-
 .../hadoop/hive/ql/plan/api/Operator.java       |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Query.java   |     2 +-
 .../hadoop/hive/ql/plan/api/QueryPlan.java      |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Stage.java   |     2 +-
 .../apache/hadoop/hive/ql/plan/api/Task.java    |     2 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |    17 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   333 +-
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |    24 +-
 .../hadoop/hive/ql/exec/KeyWrapperFactory.java  |     4 +
 .../hadoop/hive/ql/exec/MapJoinOperator.java    |     8 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java    |    31 +-
 .../apache/hadoop/hive/ql/exec/Operator.java    |     3 +-
 .../ql/exec/SparkHashTableSinkOperator.java     |     3 +-
 .../hadoop/hive/ql/exec/StatsNoJobTask.java     |    25 +-
 .../apache/hadoop/hive/ql/exec/StatsTask.java   |    13 +-
 .../apache/hadoop/hive/ql/exec/Utilities.java   |    11 +-
 .../persistence/BytesBytesMultiHashMap.java     |    11 +-
 .../persistence/HybridHashTableContainer.java   |    68 +-
 .../ql/exec/persistence/PTFRowContainer.java    |    14 +-
 .../hive/ql/exec/persistence/RowContainer.java  |    12 +-
 .../hadoop/hive/ql/exec/spark/SparkPlan.java    |     3 +-
 .../hive/ql/exec/spark/SparkPlanGenerator.java  |     3 +-
 .../hive/ql/exec/spark/SparkRecordHandler.java  |     3 +-
 .../hadoop/hive/ql/exec/spark/SparkTask.java    |     2 +-
 .../ql/exec/spark/status/SparkJobMonitor.java   |     2 +-
 .../hadoop/hive/ql/exec/tez/DagUtils.java       |     3 +
 .../hive/ql/exec/tez/HashTableLoader.java       |     7 +-
 .../hadoop/hive/ql/exec/tez/InPlaceUpdates.java |    65 +
 .../hive/ql/exec/tez/RecordProcessor.java       |     3 +-
 .../hive/ql/exec/tez/ReduceRecordProcessor.java |     1 -
 .../hive/ql/exec/tez/ReduceRecordSource.java    |     3 +-
 .../hadoop/hive/ql/exec/tez/TezJobMonitor.java  |    70 +-
 .../hadoop/hive/ql/exec/tez/TezProcessor.java   |     3 +-
 .../apache/hadoop/hive/ql/exec/tez/TezTask.java |    20 +-
 .../ql/exec/tez/tools/KeyValuesInputMerger.java |     1 -
 .../ql/exec/vector/VectorizationContext.java    |    10 +-
 .../apache/hadoop/hive/ql/hooks/ATSHook.java    |     9 +-
 .../hadoop/hive/ql/hooks/LineageLogger.java     |    95 +-
 .../hive/ql/io/CombineHiveInputFormat.java      |    10 +-
 .../hadoop/hive/ql/io/HiveInputFormat.java      |     5 +-
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java   |   151 +-
 .../apache/hadoop/hive/ql/io/orc/OrcSerde.java  |     1 +
 .../apache/hadoop/hive/ql/io/orc/OrcStruct.java |     2 +-
 .../hadoop/hive/ql/io/orc/RecordReaderImpl.java |     4 +-
 .../hive/ql/io/parquet/ProjectionPusher.java    |     3 +-
 .../serde/ParquetHiveArrayInspector.java        |    12 +
 .../ql/io/parquet/timestamp/NanoTimeUtils.java  |    23 +-
 .../hive/ql/io/sarg/ConvertAstToSearchArg.java  |     4 +
 .../apache/hadoop/hive/ql/lib/RuleRegExp.java   |    61 +-
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |   195 -
 .../apache/hadoop/hive/ql/metadata/Hive.java    |    34 +-
 .../hadoop/hive/ql/metadata/Partition.java      |    29 +-
 .../ql/optimizer/ColumnPrunerProcFactory.java   |     3 +
 .../hive/ql/optimizer/ConvertJoinMapJoin.java   |    22 +-
 .../hive/ql/optimizer/GenMapRedUtils.java       |    57 +-
 .../ql/optimizer/ReduceSinkMapJoinProc.java     |    19 +-
 .../calcite/reloperators/HiveBetween.java       |    75 +
 .../optimizer/calcite/reloperators/HiveIn.java  |    41 +
 .../calcite/reloperators/HiveLimit.java         |    57 -
 .../calcite/reloperators/HiveSort.java          |   110 -
 .../calcite/reloperators/HiveSortLimit.java     |   110 +
 .../rules/HiveAggregateProjectMergeRule.java    |   151 +
 .../calcite/rules/HivePreFilteringRule.java     |    37 +-
 .../calcite/rules/HiveRelFieldTrimmer.java      |   145 +-
 .../calcite/stats/HiveRelMdMemory.java          |     9 +-
 .../calcite/stats/HiveRelMdParallelism.java     |     4 +-
 .../calcite/translator/ASTConverter.java        |    24 +-
 .../calcite/translator/HiveOpConverter.java     |     8 +-
 .../translator/PlanModifierForASTConv.java      |    14 +-
 .../translator/PlanModifierForReturnPath.java   |     4 -
 .../calcite/translator/PlanModifierUtil.java    |     4 +-
 .../translator/SqlFunctionConverter.java        |    16 +-
 .../hive/ql/optimizer/lineage/LineageCtx.java   |     8 +-
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |     7 +-
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |    28 +-
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |    17 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g    |     9 +-
 .../apache/hadoop/hive/ql/parse/QBSubQuery.java |     7 -
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |     4 +-
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |     2 +
 .../hadoop/hive/ql/parse/SubQueryUtils.java     |    11 -
 .../hive/ql/parse/spark/SparkCompiler.java      |     3 +-
 .../org/apache/hadoop/hive/ql/plan/DDLWork.java |    21 +
 .../hadoop/hive/ql/plan/HiveOperation.java      |     1 +
 .../hive/ql/plan/ShowCreateDatabaseDesc.java    |    94 +
 .../AuthorizationPreEventListener.java          |     2 +-
 .../authorization/plugin/HiveOperationType.java |     1 +
 .../plugin/sqlstd/Operation2Privilege.java      |     2 +
 .../sqlstd/SQLStdHiveAccessController.java      |     5 +
 .../hadoop/hive/ql/session/SessionState.java    |    37 +-
 .../apache/hadoop/hive/ql/stats/StatsUtils.java |    87 +-
 .../org/apache/hadoop/hive/ql/udf/UDFJson.java  |     2 +
 .../hive/ql/udf/generic/GenericUDAFMax.java     |    16 +-
 ql/src/main/resources/hive-exec-log4j2.xml      |     5 +-
 ql/src/main/resources/tez-container-log4j2.xml  |     5 +-
 .../hive/metastore/TestMetastoreExpr.java       |     2 +-
 .../persistence/TestBytesBytesMultiHashMap.java |     3 +
 .../ql/exec/persistence/TestHashPartition.java  |    29 +
 .../exec/persistence/TestPTFRowContainer.java   |    31 +-
 .../hadoop/hive/ql/io/orc/TestOrcStruct.java    |     2 +
 .../serde/TestParquetTimestampUtils.java        |    38 +-
 ...nMapRedUtilsUsePartitionColumnsNegative.java |    73 +
 ...nMapRedUtilsUsePartitionColumnsPositive.java |    61 +
 .../authorization_set_show_current_role.q       |     3 +
 .../clientpositive/bucket_map_join_tez1.q       |    31 +
 .../queries/clientpositive/cbo_rp_auto_join17.q |    14 +
 .../cbo_rp_cross_product_check_2.q              |    31 +
 .../clientpositive/drop_table_with_index.q      |    35 +
 .../test/queries/clientpositive/dynpart_merge.q |    28 +
 .../dynpart_sort_opt_vectorization.q            |     2 +
 .../clientpositive/dynpart_sort_optimization.q  |     2 +
 .../queries/clientpositive/exchgpartition2lel.q |    32 +
 ql/src/test/queries/clientpositive/lineage3.q   |    26 +
 .../test/queries/clientpositive/load_orc_part.q |     5 +
 .../parquet_mixed_partition_formats.q           |    42 +
 .../clientpositive/parquet_ppd_boolean.q        |    42 +-
 .../queries/clientpositive/parquet_ppd_char.q   |    46 +-
 .../queries/clientpositive/parquet_ppd_date.q   |    64 +-
 .../clientpositive/parquet_ppd_decimal.q        |   106 +-
 .../clientpositive/parquet_ppd_timestamp.q      |    62 +-
 .../clientpositive/parquet_ppd_varchar.q        |    46 +-
 .../clientpositive/parquet_predicate_pushdown.q |    20 +-
 .../clientpositive/show_create_database.q       |     3 +
 .../queries/clientpositive/subquery_views.q     |    22 +-
 .../queries/clientpositive/union_fast_stats.q   |    68 +
 .../queries/clientpositive/vector_char_cast.q   |     9 +
 .../queries/clientpositive/windowing_udaf.q     |     4 +
 .../subquery_exists_implicit_gby.q.out          |     8 +-
 .../subquery_nested_subquery.q.out              |     4 +-
 .../subquery_notexists_implicit_gby.q.out       |     8 +-
 .../subquery_windowing_corr.q.out               |     7 +-
 .../alter_partition_coltype.q.out               |     8 +-
 .../clientpositive/annotate_stats_groupby.q.out |   106 +-
 .../annotate_stats_groupby2.q.out               |    28 +-
 .../authorization_explain.q.java1.7.out         |     2 +-
 .../authorization_explain.q.java1.8.out         |     2 +-
 .../authorization_set_show_current_role.q.out   |     8 +
 .../results/clientpositive/auto_join18.q.out    |    12 +-
 .../auto_join18_multi_distinct.q.out            |    12 +-
 .../results/clientpositive/auto_join27.q.out    |    18 +-
 .../results/clientpositive/auto_join32.q.out    |     4 +-
 .../clientpositive/binarysortable_1.q.out       |   Bin 4329 -> 4325 bytes
 .../clientpositive/cbo_rp_auto_join17.q.out     |   118 +
 .../cbo_rp_cross_product_check_2.q.out          |   699 +
 .../clientpositive/correlationoptimizer2.q.out  |   220 +-
 .../clientpositive/correlationoptimizer6.q.out  |   232 +-
 ql/src/test/results/clientpositive/count.q.out  |    14 +-
 .../results/clientpositive/ctas_colname.q.out   |    52 +-
 .../test/results/clientpositive/database.q.out  |     2 +-
 .../clientpositive/decimal_precision.q.out      |     4 +-
 .../results/clientpositive/decimal_udf.q.out    |    30 +-
 .../results/clientpositive/distinct_stats.q.out |    14 +-
 .../clientpositive/drop_table_with_index.q.out  |   152 +
 .../results/clientpositive/dynpart_merge.q.out  |    99 +
 .../dynpart_sort_opt_vectorization.q.out        |   117 +-
 .../dynpart_sort_optimization.q.out             |   117 +-
 ...ryption_select_read_only_encrypted_tbl.q.out |     4 +-
 .../clientpositive/exchgpartition2lel.q.out     |   182 +
 .../clientpositive/explain_dependency.q.out     |    18 +-
 .../clientpositive/explain_dependency2.q.out    |    16 +-
 .../clientpositive/explain_logical.q.out        |    78 +-
 .../clientpositive/fetch_aggregation.q.out      |     4 +-
 .../test/results/clientpositive/gby_star.q.out  |    54 +-
 .../test/results/clientpositive/groupby12.q.out |     6 +-
 .../results/clientpositive/groupby5_map.q.out   |     4 +-
 .../clientpositive/groupby5_map_skew.q.out      |     4 +-
 .../results/clientpositive/groupby_cube1.q.out  |    12 +-
 .../groupby_distinct_samekey.q.out              |     6 +-
 .../clientpositive/groupby_grouping_sets2.q.out |    10 +-
 .../clientpositive/groupby_grouping_sets3.q.out |    12 +-
 .../clientpositive/groupby_grouping_sets5.q.out |     8 +-
 .../clientpositive/groupby_grouping_sets6.q.out |     8 +-
 .../clientpositive/groupby_position.q.out       |    36 +-
 .../clientpositive/groupby_resolution.q.out     |    60 +-
 .../clientpositive/groupby_rollup1.q.out        |    12 +-
 .../clientpositive/groupby_sort_10.q.out        |     8 +-
 .../clientpositive/groupby_sort_11.q.out        |    10 +-
 .../results/clientpositive/groupby_sort_8.q.out |    12 +-
 ql/src/test/results/clientpositive/having.q.out |    62 +-
 .../test/results/clientpositive/having2.q.out   |    12 +-
 .../clientpositive/index_auto_mult_tables.q.out |    12 +-
 .../clientpositive/index_auto_self_join.q.out   |    12 +-
 .../clientpositive/index_auto_update.q.out      |     6 +-
 .../index_bitmap_auto_partitioned.q.out         |     6 +-
 .../index_bitmap_compression.q.out              |     6 +-
 .../infer_bucket_sort_dyn_part.q.out            |     4 +-
 .../infer_bucket_sort_map_operators.q.out       |     4 +-
 .../results/clientpositive/input4.q.java1.7.out |     2 +-
 .../results/clientpositive/input4.q.java1.8.out |     2 +-
 .../results/clientpositive/join0.q.java1.7.out  |     2 +-
 .../results/clientpositive/join0.q.java1.8.out  |     4 +-
 ql/src/test/results/clientpositive/join18.q.out |    12 +-
 .../clientpositive/join18_multi_distinct.q.out  |    12 +-
 ql/src/test/results/clientpositive/join31.q.out |    36 +-
 .../limit_partition_metadataonly.q.out          |     4 +-
 .../results/clientpositive/limit_pushdown.q.out |    36 +-
 .../test/results/clientpositive/lineage2.q.out  |     2 +-
 .../test/results/clientpositive/lineage3.q.out  |    72 +-
 .../list_bucket_dml_6.q.java1.7.out             |    12 +-
 .../list_bucket_dml_6.q.java1.8.out             |    12 +-
 .../clientpositive/list_bucket_dml_7.q.out      |    12 +-
 .../list_bucket_query_multiskew_3.q.out         |     2 +-
 .../results/clientpositive/load_orc_part.q.out  |    26 +
 .../clientpositive/mapjoin_mapjoin.q.out        |    32 +-
 .../clientpositive/metadata_only_queries.q.out  |     4 +-
 .../results/clientpositive/metadataonly1.q.out  |   112 +-
 .../results/clientpositive/multiMapJoin2.q.out  |   226 +-
 .../nonblock_op_deduplicate.q.out               |     8 +-
 .../results/clientpositive/nonmr_fetch.q.out    |    14 +-
 .../results/clientpositive/parallel_join0.q.out |     2 +-
 .../parquet_mixed_partition_formats.q.out       |   303 +
 .../clientpositive/parquet_ppd_boolean.q.out    |   194 +-
 .../clientpositive/parquet_ppd_char.q.out       |   224 +-
 .../clientpositive/parquet_ppd_date.q.out       |   324 +-
 .../clientpositive/parquet_ppd_decimal.q.out    |   594 +-
 .../clientpositive/parquet_ppd_timestamp.q.out  |   314 +-
 .../clientpositive/parquet_ppd_varchar.q.out    |   224 +-
 .../parquet_predicate_pushdown.q.out            |   118 +-
 .../clientpositive/partition_multilevels.q.out  |     8 +-
 .../clientpositive/plan_json.q.java1.7.out      |     2 +-
 .../clientpositive/plan_json.q.java1.8.out      |     2 +-
 .../test/results/clientpositive/ppd_gby.q.out   |    12 +-
 .../test/results/clientpositive/ppd_gby2.q.out  |    60 +-
 .../clientpositive/ppd_join_filter.q.out        |    98 +-
 .../ql_rewrite_gbtoidx_cbo_1.q.out              |   168 +-
 .../ql_rewrite_gbtoidx_cbo_2.q.out              |    94 +-
 .../reduce_deduplicate_extended.q.out           |    32 +-
 .../clientpositive/selectDistinctStar.q.out     |    44 +-
 .../clientpositive/show_create_database.q.out   |    19 +
 .../clientpositive/spark/auto_join18.q.out      |    10 +-
 .../spark/auto_join18_multi_distinct.q.out      |    12 +-
 .../clientpositive/spark/auto_join27.q.out      |    18 +-
 .../clientpositive/spark/auto_join32.q.out      |    53 +-
 .../spark/bucket_map_join_tez1.q.out            |   357 +
 .../results/clientpositive/spark/count.q.out    |    14 +-
 .../clientpositive/spark/groupby5_map.q.out     |     4 +-
 .../spark/groupby5_map_skew.q.out               |     4 +-
 .../clientpositive/spark/groupby_cube1.q.out    |    12 +-
 .../clientpositive/spark/groupby_position.q.out |    18 +-
 .../spark/groupby_resolution.q.out              |    60 +-
 .../clientpositive/spark/groupby_rollup1.q.out  |    12 +-
 .../results/clientpositive/spark/having.q.out   |    62 +-
 .../spark/infer_bucket_sort_map_operators.q.out |     4 +-
 .../results/clientpositive/spark/join18.q.out   |    10 +-
 .../spark/join18_multi_distinct.q.out           |    12 +-
 .../results/clientpositive/spark/join31.q.out   |    36 +-
 .../spark/limit_partition_metadataonly.q.out    |     4 +-
 .../clientpositive/spark/limit_pushdown.q.out   |    34 +-
 .../clientpositive/spark/mapjoin_mapjoin.q.out  |    24 +-
 .../spark/metadata_only_queries.q.out           |     4 +-
 .../clientpositive/spark/ppd_join_filter.q.out  |    90 +-
 .../spark/ql_rewrite_gbtoidx_cbo_1.q.out        |   168 +-
 .../clientpositive/spark/stats_only_null.q.out  |     8 +-
 .../clientpositive/spark/subquery_in.q.out      |    36 +-
 .../results/clientpositive/spark/union11.q.out  |    42 +-
 .../results/clientpositive/spark/union14.q.out  |    28 +-
 .../results/clientpositive/spark/union15.q.out  |    28 +-
 .../results/clientpositive/spark/union28.q.out  |     4 +-
 .../results/clientpositive/spark/union30.q.out  |     4 +-
 .../results/clientpositive/spark/union33.q.out  |     8 +-
 .../results/clientpositive/spark/union5.q.out   |    34 +-
 .../results/clientpositive/spark/union7.q.out   |    28 +-
 .../clientpositive/spark/union_remove_21.q.out  |     4 +-
 .../spark/vector_count_distinct.q.out           |     4 +-
 .../spark/vector_decimal_aggregate.q.out        |    12 +-
 .../spark/vector_distinct_2.q.out               |    28 +-
 .../clientpositive/spark/vector_groupby_3.q.out |    30 +-
 .../spark/vector_mapjoin_reduce.q.out           |    36 +-
 .../clientpositive/spark/vector_orderby_5.q.out |     6 +-
 .../clientpositive/spark/vectorization_0.q.out  |    16 +-
 .../clientpositive/spark/vectorization_13.q.out |    32 +-
 .../clientpositive/spark/vectorization_15.q.out |    16 +-
 .../clientpositive/spark/vectorization_16.q.out |    16 +-
 .../clientpositive/spark/vectorization_9.q.out  |    16 +-
 .../spark/vectorization_pushdown.q.out          |     4 +-
 .../spark/vectorization_short_regress.q.out     |    74 +-
 .../spark/vectorized_nested_mapjoin.q.out       |    18 +-
 .../spark/vectorized_timestamp_funcs.q.out      |    12 +-
 .../clientpositive/stats_only_null.q.out        |     8 +-
 .../results/clientpositive/stats_ppr_all.q.out  |    16 +-
 .../subq_where_serialization.q.out              |    18 +-
 .../clientpositive/subquery_exists_having.q.out |    48 +-
 .../results/clientpositive/subquery_in.q.out    |    36 +-
 .../clientpositive/subquery_in_having.q.out     |   260 +-
 .../clientpositive/subquery_notexists.q.out     |    18 +-
 .../subquery_notexists_having.q.out             |    26 +-
 .../results/clientpositive/subquery_notin.q.out |    24 +-
 .../subquery_notin_having.q.java1.7.out         |    50 +-
 .../subquery_unqualcolumnrefs.q.out             |    74 +-
 .../results/clientpositive/subquery_views.q.out |   124 +-
 .../tez/bucket_map_join_tez1.q.out              |   333 +
 .../clientpositive/tez/constprog_dpp.q.out      |     4 +-
 .../test/results/clientpositive/tez/count.q.out |    14 +-
 .../tez/dynamic_partition_pruning.q.out         |    88 +-
 .../tez/dynpart_sort_opt_vectorization.q.out    |   102 +-
 .../tez/dynpart_sort_optimization.q.out         |   101 +-
 .../clientpositive/tez/explainuser_1.q.out      |  2799 +-
 .../clientpositive/tez/explainuser_2.q.out      |  4004 +-
 .../clientpositive/tez/explainuser_3.q.out      |    10 +-
 .../results/clientpositive/tez/having.q.out     |    62 +-
 .../clientpositive/tez/limit_pushdown.q.out     |    34 +-
 .../clientpositive/tez/mapjoin_mapjoin.q.out    |    24 +-
 .../tez/metadata_only_queries.q.out             |     4 +-
 .../clientpositive/tez/metadataonly1.q.out      |    44 +-
 .../test/results/clientpositive/tez/mrr.q.out   |    94 +-
 .../clientpositive/tez/selectDistinctStar.q.out |    44 +-
 .../tez/show_create_database.q.out              |    19 +
 .../clientpositive/tez/stats_only_null.q.out    |     8 +-
 .../clientpositive/tez/subquery_in.q.out        |    36 +-
 .../results/clientpositive/tez/tez_dml.q.out    |     6 +-
 .../results/clientpositive/tez/union5.q.out     |    44 +-
 .../results/clientpositive/tez/union7.q.out     |    28 +-
 .../clientpositive/tez/unionDistinct_1.q.out    |     8 +-
 .../clientpositive/tez/union_fast_stats.q.out   |   526 +
 .../clientpositive/tez/vector_aggregate_9.q.out |     4 +-
 .../tez/vector_binary_join_groupby.q.out        |     4 +-
 .../clientpositive/tez/vector_char_cast.q.out   |    35 +
 .../tez/vector_count_distinct.q.out             |     4 +-
 .../tez/vector_decimal_aggregate.q.out          |    12 +-
 .../tez/vector_decimal_precision.q.out          |     4 +-
 .../clientpositive/tez/vector_decimal_udf.q.out |    30 +-
 .../clientpositive/tez/vector_distinct_2.q.out  |    28 +-
 .../clientpositive/tez/vector_groupby_3.q.out   |    30 +-
 .../tez/vector_groupby_reduce.q.out             |     8 +-
 .../tez/vector_grouping_sets.q.out              |     8 +-
 .../tez/vector_mapjoin_reduce.q.out             |    36 +-
 .../clientpositive/tez/vector_orderby_5.q.out   |     6 +-
 .../clientpositive/tez/vector_outer_join2.q.out |    20 +-
 .../tez/vector_partition_diff_num_cols.q.out    |    20 +-
 .../tez/vector_partitioned_date_time.q.out      |    12 +-
 .../tez/vector_reduce_groupby_decimal.q.out     |    24 +-
 .../clientpositive/tez/vectorization_0.q.out    |    16 +-
 .../clientpositive/tez/vectorization_13.q.out   |    32 +-
 .../clientpositive/tez/vectorization_15.q.out   |    16 +-
 .../clientpositive/tez/vectorization_16.q.out   |    16 +-
 .../clientpositive/tez/vectorization_9.q.out    |    16 +-
 .../tez/vectorization_limit.q.out               |    14 +-
 .../tez/vectorization_pushdown.q.out            |     4 +-
 .../tez/vectorization_short_regress.q.out       |    74 +-
 .../tez/vectorized_distinct_gby.q.out           |     8 +-
 .../vectorized_dynamic_partition_pruning.q.out  |    88 +-
 .../tez/vectorized_nested_mapjoin.q.out         |    18 +-
 .../clientpositive/tez/vectorized_parquet.q.out |     6 +-
 .../tez/vectorized_timestamp_funcs.q.out        |    12 +-
 ql/src/test/results/clientpositive/udf8.q.out   |     4 +-
 .../test/results/clientpositive/udf_count.q.out |    16 +-
 .../test/results/clientpositive/union11.q.out   |    70 +-
 .../test/results/clientpositive/union14.q.out   |    32 +-
 .../test/results/clientpositive/union15.q.out   |    38 +-
 .../test/results/clientpositive/union28.q.out   |     8 +-
 .../test/results/clientpositive/union30.q.out   |     8 +-
 .../test/results/clientpositive/union33.q.out   |     8 +-
 ql/src/test/results/clientpositive/union5.q.out |    48 +-
 ql/src/test/results/clientpositive/union7.q.out |    32 +-
 .../clientpositive/unionDistinct_1.q.out        |     8 +-
 .../clientpositive/union_fast_stats.q.out       |   526 +
 .../clientpositive/union_remove_21.q.out        |     8 +-
 .../clientpositive/vector_aggregate_9.q.out     |     4 +-
 .../vector_aggregate_without_gby.q.out          |     4 +-
 .../vector_binary_join_groupby.q.out            |     4 +-
 .../clientpositive/vector_char_cast.q.out       |    35 +
 .../clientpositive/vector_count_distinct.q.out  |     6 +-
 .../vector_decimal_aggregate.q.out              |    12 +-
 .../vector_decimal_precision.q.out              |     4 +-
 .../clientpositive/vector_decimal_udf.q.out     |    30 +-
 .../clientpositive/vector_distinct_2.q.out      |    28 +-
 .../clientpositive/vector_groupby_3.q.out       |    30 +-
 .../clientpositive/vector_groupby_reduce.q.out  |     8 +-
 .../clientpositive/vector_grouping_sets.q.out   |     8 +-
 .../clientpositive/vector_left_outer_join.q.out |     8 +-
 .../clientpositive/vector_mapjoin_reduce.q.out  |    36 +-
 .../clientpositive/vector_orderby_5.q.out       |     6 +-
 .../clientpositive/vector_outer_join1.q.out     |     8 +-
 .../clientpositive/vector_outer_join2.q.out     |    28 +-
 .../clientpositive/vector_outer_join3.q.out     |    24 +-
 .../clientpositive/vector_outer_join4.q.out     |     8 +-
 .../clientpositive/vector_outer_join5.q.out     |    48 +-
 .../vector_partition_diff_num_cols.q.out        |    20 +-
 .../vector_partitioned_date_time.q.out          |    12 +-
 .../vector_reduce_groupby_decimal.q.out         |    24 +-
 .../clientpositive/vectorization_0.q.out        |    16 +-
 .../clientpositive/vectorization_13.q.out       |    32 +-
 .../clientpositive/vectorization_15.q.out       |    16 +-
 .../clientpositive/vectorization_16.q.out       |    16 +-
 .../clientpositive/vectorization_9.q.out        |    16 +-
 .../clientpositive/vectorization_limit.q.out    |    16 +-
 .../clientpositive/vectorization_pushdown.q.out |     4 +-
 .../vectorization_short_regress.q.out           |    74 +-
 .../vectorized_distinct_gby.q.out               |    12 +-
 .../vectorized_nested_mapjoin.q.out             |    26 +-
 .../clientpositive/vectorized_parquet.q.out     |     6 +-
 .../vectorized_parquet_types.q.out              |     6 +-
 .../vectorized_timestamp_funcs.q.out            |    12 +-
 .../results/clientpositive/windowing_udaf.q.out |    12 +
 ql/src/test/templates/TestCliDriver.vm          |     3 +-
 .../hadoop/hive/serde/test/InnerStruct.java     |     2 +-
 .../hadoop/hive/serde/test/ThriftTestObj.java   |     2 +-
 .../hadoop/hive/serde2/thrift/test/Complex.java |     2 +-
 .../hive/serde2/thrift/test/IntString.java      |     2 +-
 .../hive/serde2/thrift/test/MegaStruct.java     |     2 +-
 .../hive/serde2/thrift/test/MiniStruct.java     |     2 +-
 .../hive/serde2/thrift/test/SetIntString.java   |     2 +-
 .../hive/serde2/ColumnProjectionUtils.java      |    22 +
 .../apache/hadoop/hive/serde2/WriteBuffers.java |    10 +-
 .../BinarySortableSerDeWithEndPrefix.java       |    41 +
 .../hadoop/hive/serde2/lazy/LazyByte.java       |     4 +
 .../hadoop/hive/serde2/lazy/LazyDouble.java     |     4 +
 .../hadoop/hive/serde2/lazy/LazyFloat.java      |     4 +
 .../hadoop/hive/serde2/lazy/LazyInteger.java    |     4 +
 .../hadoop/hive/serde2/lazy/LazyLong.java       |     4 +
 .../hadoop/hive/serde2/lazy/LazyShort.java      |     4 +
 .../hadoop/hive/serde2/lazy/LazyUtils.java      |    28 +
 .../hadoop/hive/service/HiveClusterStatus.java  |     2 +-
 .../hive/service/HiveServerException.java       |     2 +-
 .../apache/hadoop/hive/service/ThriftHive.java  |     2 +-
 .../service/cli/thrift/TArrayTypeEntry.java     |     2 +-
 .../hive/service/cli/thrift/TBinaryColumn.java  |     2 +-
 .../hive/service/cli/thrift/TBoolColumn.java    |     2 +-
 .../hive/service/cli/thrift/TBoolValue.java     |     2 +-
 .../hive/service/cli/thrift/TByteColumn.java    |     2 +-
 .../hive/service/cli/thrift/TByteValue.java     |     2 +-
 .../hive/service/cli/thrift/TCLIService.java    |     2 +-
 .../cli/thrift/TCancelDelegationTokenReq.java   |     2 +-
 .../cli/thrift/TCancelDelegationTokenResp.java  |     2 +-
 .../service/cli/thrift/TCancelOperationReq.java |     2 +-
 .../cli/thrift/TCancelOperationResp.java        |     2 +-
 .../service/cli/thrift/TCloseOperationReq.java  |     2 +-
 .../service/cli/thrift/TCloseOperationResp.java |     2 +-
 .../service/cli/thrift/TCloseSessionReq.java    |     2 +-
 .../service/cli/thrift/TCloseSessionResp.java   |     2 +-
 .../hive/service/cli/thrift/TColumnDesc.java    |     2 +-
 .../hive/service/cli/thrift/TDoubleColumn.java  |     2 +-
 .../hive/service/cli/thrift/TDoubleValue.java   |     2 +-
 .../cli/thrift/TExecuteStatementReq.java        |     2 +-
 .../cli/thrift/TExecuteStatementResp.java       |     2 +-
 .../service/cli/thrift/TFetchResultsReq.java    |     2 +-
 .../service/cli/thrift/TFetchResultsResp.java   |     2 +-
 .../service/cli/thrift/TGetCatalogsReq.java     |     2 +-
 .../service/cli/thrift/TGetCatalogsResp.java    |     2 +-
 .../hive/service/cli/thrift/TGetColumnsReq.java |     2 +-
 .../service/cli/thrift/TGetColumnsResp.java     |     2 +-
 .../cli/thrift/TGetDelegationTokenReq.java      |     2 +-
 .../cli/thrift/TGetDelegationTokenResp.java     |     2 +-
 .../service/cli/thrift/TGetFunctionsReq.java    |     2 +-
 .../service/cli/thrift/TGetFunctionsResp.java   |     2 +-
 .../hive/service/cli/thrift/TGetInfoReq.java    |     2 +-
 .../hive/service/cli/thrift/TGetInfoResp.java   |     2 +-
 .../cli/thrift/TGetOperationStatusReq.java      |     2 +-
 .../cli/thrift/TGetOperationStatusResp.java     |     2 +-
 .../cli/thrift/TGetResultSetMetadataReq.java    |     2 +-
 .../cli/thrift/TGetResultSetMetadataResp.java   |     2 +-
 .../hive/service/cli/thrift/TGetSchemasReq.java |     2 +-
 .../service/cli/thrift/TGetSchemasResp.java     |     2 +-
 .../service/cli/thrift/TGetTableTypesReq.java   |     2 +-
 .../service/cli/thrift/TGetTableTypesResp.java  |     2 +-
 .../hive/service/cli/thrift/TGetTablesReq.java  |     2 +-
 .../hive/service/cli/thrift/TGetTablesResp.java |     2 +-
 .../service/cli/thrift/TGetTypeInfoReq.java     |     2 +-
 .../service/cli/thrift/TGetTypeInfoResp.java    |     2 +-
 .../service/cli/thrift/THandleIdentifier.java   |     2 +-
 .../hive/service/cli/thrift/TI16Column.java     |     2 +-
 .../hive/service/cli/thrift/TI16Value.java      |     2 +-
 .../hive/service/cli/thrift/TI32Column.java     |     2 +-
 .../hive/service/cli/thrift/TI32Value.java      |     2 +-
 .../hive/service/cli/thrift/TI64Column.java     |     2 +-
 .../hive/service/cli/thrift/TI64Value.java      |     2 +-
 .../hive/service/cli/thrift/TMapTypeEntry.java  |     2 +-
 .../service/cli/thrift/TOpenSessionReq.java     |     2 +-
 .../service/cli/thrift/TOpenSessionResp.java    |     2 +-
 .../service/cli/thrift/TOperationHandle.java    |     2 +-
 .../service/cli/thrift/TPrimitiveTypeEntry.java |     2 +-
 .../cli/thrift/TRenewDelegationTokenReq.java    |     2 +-
 .../cli/thrift/TRenewDelegationTokenResp.java   |     2 +-
 .../apache/hive/service/cli/thrift/TRow.java    |     2 +-
 .../apache/hive/service/cli/thrift/TRowSet.java |     2 +-
 .../hive/service/cli/thrift/TSessionHandle.java |     2 +-
 .../apache/hive/service/cli/thrift/TStatus.java |     2 +-
 .../hive/service/cli/thrift/TStringColumn.java  |     2 +-
 .../hive/service/cli/thrift/TStringValue.java   |     2 +-
 .../service/cli/thrift/TStructTypeEntry.java    |     2 +-
 .../hive/service/cli/thrift/TTableSchema.java   |     2 +-
 .../hive/service/cli/thrift/TTypeDesc.java      |     2 +-
 .../service/cli/thrift/TTypeQualifiers.java     |     2 +-
 .../service/cli/thrift/TUnionTypeEntry.java     |     2 +-
 .../cli/thrift/TUserDefinedTypeEntry.java       |     2 +-
 .../gen-py/hive_service/ThriftHive-remote       |    49 +-
 .../auth/LdapAuthenticationProviderImpl.java    |    82 +-
 .../org/apache/hive/service/cli/Column.java     |     2 +-
 .../thrift/RetryingThriftCLIServiceClient.java  |   331 +
 .../org/apache/hive/service/cli/TestColumn.java |   129 +
 .../cli/TestRetryingThriftCLIServiceClient.java |   133 +
 .../hadoop/hive/shims/Hadoop20SShims.java       |     5 +-
 .../apache/hadoop/hive/shims/Hadoop23Shims.java |     4 +-
 .../hive/thrift/HadoopThriftAuthBridge.java     |     3 +
 .../hive/ql/io/sarg/SearchArgumentFactory.java  |     5 +-
 .../hive/ql/io/sarg/SearchArgumentImpl.java     |     7 +-
 testutils/ptest2/src/main/resources/log4j2.xml  |     5 +-
 721 files changed, 90748 insertions(+), 16405 deletions(-)
----------------------------------------------------------------------



[2/3] hive git commit: HIVE-11783: Extending HPL/SQL parser (Dmitry Tolpeko reviewed by Alan Gates)

Posted by dm...@apache.org.
HIVE-11783: Extending HPL/SQL parser (Dmitry Tolpeko reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/06790789
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/06790789
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/06790789

Branch: refs/heads/master
Commit: 06790789bddb35f60706071d8d3682d434fa05dd
Parents: d51c62a
Author: Dmitry Tolpeko <dm...@gmail.com>
Authored: Tue Sep 22 06:38:06 2015 -0700
Committer: Dmitry Tolpeko <dm...@gmail.com>
Committed: Tue Sep 22 06:38:06 2015 -0700

----------------------------------------------------------------------
 .../antlr4/org/apache/hive/hplsql/Hplsql.g4     |  70 ++--
 .../main/java/org/apache/hive/hplsql/Cmp.java   | 314 ++++++++++++++++++
 .../java/org/apache/hive/hplsql/Column.java     |  29 +-
 .../main/java/org/apache/hive/hplsql/Conn.java  |  21 ++
 .../main/java/org/apache/hive/hplsql/Copy.java  |  50 ++-
 .../main/java/org/apache/hive/hplsql/Exec.java  |  66 +++-
 .../java/org/apache/hive/hplsql/Expression.java |  33 +-
 .../main/java/org/apache/hive/hplsql/File.java  |  18 +-
 .../main/java/org/apache/hive/hplsql/Meta.java  |  28 +-
 .../main/java/org/apache/hive/hplsql/Query.java |  18 ++
 .../java/org/apache/hive/hplsql/Select.java     |  23 +-
 .../main/java/org/apache/hive/hplsql/Stmt.java  |   8 +-
 .../main/java/org/apache/hive/hplsql/Var.java   | 110 ++++++-
 .../apache/hive/hplsql/functions/Function.java  |   6 +-
 .../hive/hplsql/functions/FunctionMisc.java     | 121 +++++++
 .../org/apache/hive/hplsql/TestHplsqlLocal.java |  18 ++
 .../apache/hive/hplsql/TestHplsqlOffline.java   |   5 +
 hplsql/src/test/queries/db/cmp_row_count.sql    |   4 +
 hplsql/src/test/queries/db/cmp_sum.sql          |   3 +
 hplsql/src/test/queries/db/copy_to_file.sql     |   2 +
 hplsql/src/test/queries/db/copy_to_hdfs.sql     |   2 +
 hplsql/src/test/queries/db/copy_to_table.sql    |   2 +
 hplsql/src/test/queries/db/part_count.sql       |  17 +
 hplsql/src/test/queries/db/part_count_by.sql    |   4 +
 hplsql/src/test/queries/db/schema.sql           |  32 ++
 hplsql/src/test/queries/db/select_into.sql      |  20 +-
 hplsql/src/test/queries/db/select_into2.sql     |  17 +
 .../test/queries/local/create_procedure2.sql    |  16 +
 hplsql/src/test/queries/local/if2.sql           |   5 +
 hplsql/src/test/queries/local/include.sql       |   2 +
 hplsql/src/test/queries/local/include_file.sql  |   1 +
 hplsql/src/test/queries/local/mult_div.sql      |   8 +
 hplsql/src/test/queries/offline/select_db2.sql  |   5 +
 .../src/test/results/db/cmp_row_count.out.txt   |  12 +
 hplsql/src/test/results/db/cmp_sum.out.txt      | 320 +++++++++++++++++++
 hplsql/src/test/results/db/copy_to_file.out.txt |   6 +
 hplsql/src/test/results/db/copy_to_hdfs.out.txt |   4 +
 .../src/test/results/db/copy_to_table.out.txt   |   2 +
 hplsql/src/test/results/db/part_count.out.txt   |  15 +
 .../src/test/results/db/part_count_by.out.txt   |  13 +
 hplsql/src/test/results/db/select_into.out.txt  |  58 +++-
 hplsql/src/test/results/db/select_into2.out.txt |  19 ++
 .../results/local/create_procedure2.out.txt     |  10 +
 hplsql/src/test/results/local/if2.out.txt       |   4 +
 hplsql/src/test/results/local/include.out.txt   |   8 +
 hplsql/src/test/results/local/mult_div.out.txt  |   7 +
 .../src/test/results/offline/select_db2.out.txt |   6 +
 47 files changed, 1471 insertions(+), 91 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
----------------------------------------------------------------------
diff --git a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4 b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
index ff772fe..bbe7276 100644
--- a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
+++ b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
@@ -43,6 +43,7 @@ stmt :
      | break_stmt
      | call_stmt
      | close_stmt
+     | cmp_stmt
      | copy_from_local_stmt
      | copy_stmt
      | commit_stmt
@@ -183,7 +184,7 @@ declare_handler_item :     // Condition handler declaration
      ;
      
 declare_temporary_table_item :     // DECLARE TEMPORARY TABLE statement
-       T_GLOBAL? T_TEMPORARY T_TABLE ident T_OPEN_P create_table_columns T_CLOSE_P create_table_options?
+       T_GLOBAL? T_TEMPORARY T_TABLE ident (T_AS? T_OPEN_P select_stmt T_CLOSE_P | T_AS? select_stmt | T_OPEN_P create_table_columns T_CLOSE_P) create_table_options?
      ;
      
 create_table_stmt :
@@ -252,6 +253,7 @@ create_table_options_db2_item :
      | T_DISTRIBUTE T_BY T_HASH T_OPEN_P ident (T_COMMA ident)* T_CLOSE_P
      | T_LOGGED 
      | T_NOT T_LOGGED
+     | T_DEFINITION T_ONLY
      ;
      
 create_table_options_hive_item :
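The extended declare_temporary_table_item rule lets a temporary table be declared from a query (with or without AS and parentheses) as well as from an explicit column list, and DEFINITION ONLY is now accepted among the DB2 table options. A sketch of a declaration these extended rules would accept (table and column names are hypothetical):

  DECLARE GLOBAL TEMPORARY TABLE tmp_orders AS (SELECT order_id, amount FROM orders) DEFINITION ONLY;
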
@@ -283,6 +285,7 @@ dtype :                  // Data types
      | T_DATETIME
      | T_DEC
      | T_DECIMAL
+     | T_DOUBLE T_PRECISION?
      | T_FLOAT
      | T_INT
      | T_INTEGER
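With DOUBLE now taking an optional PRECISION keyword in dtype, a declaration such as the following would parse (variable name is hypothetical):

  DECLARE avg_ratio DOUBLE PRECISION;
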
@@ -377,7 +380,7 @@ else_block :
      ;
      
 include_stmt :          // INCLUDE statement
-       T_INCLUDE file_name
+       T_INCLUDE (file_name | expr)
      ;  
      
 insert_stmt :           // INSERT statement
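Since include_stmt now accepts an arbitrary expression in addition to a literal file name, the included path can be assembled at run time, for example (directory and variable name are hypothetical):

  INCLUDE '/home/hplsql/scripts/' || script_name;
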
@@ -418,7 +421,7 @@ get_diag_stmt_rowcount_item :
      ;
      
 grant_stmt :            
-       T_GRANT grant_stmt_item (T_COMMA grant_stmt_item)* T_TO ident
+       T_GRANT grant_stmt_item (T_COMMA grant_stmt_item)* T_TO T_ROLE ident
      ;
      
 grant_stmt_item :
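grant_stmt now requires the ROLE keyword before the grantee, so a statement along these lines would be expected (procedure and role names are hypothetical, and the EXECUTE ON PROCEDURE form of grant_stmt_item is assumed from the existing rule):

  GRANT EXECUTE ON PROCEDURE sp_load_orders TO ROLE etl_role;
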
@@ -445,12 +448,20 @@ close_stmt :            // CLOSE cursor statement
        T_CLOSE L_ID
      ;
      
+cmp_stmt :              // CMP statement
+       T_CMP (T_ROW_COUNT | T_SUM) cmp_source T_COMMA cmp_source
+     ;
+     
+cmp_source :
+      (table_name where_clause? | T_OPEN_P select_stmt T_CLOSE_P) (T_AT ident)?
+     ;
+     
 copy_from_local_stmt :  // COPY FROM LOCAL statement
        T_COPY T_FROM T_LOCAL copy_source (T_COMMA copy_source)* T_TO copy_target copy_file_option*
      ;
      
 copy_stmt :             // COPY statement
-       T_COPY (table_name | T_OPEN_P select_stmt T_CLOSE_P) T_TO copy_target copy_option*
+       T_COPY (table_name | T_OPEN_P select_stmt T_CLOSE_P) T_TO T_HDFS? copy_target copy_option*
      ;
      
 copy_source :
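The new cmp_stmt compares two data sources, either by row count or by per-column sums; each source is a table (optionally with a WHERE clause) or a parenthesized query, optionally qualified by a connection profile after AT. The copy_stmt change adds an optional HDFS keyword before the target. Sketches of statements these rules would accept (table, connection, and path names are hypothetical):

  CMP ROW_COUNT orders WHERE status = 'A', orders_backup AT db2_conn;
  CMP SUM (SELECT id, amount FROM orders), orders_backup AT db2_conn;
  COPY orders TO HDFS /user/hive/staging/orders;
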
@@ -458,7 +469,7 @@ copy_source :
      ;
 
 copy_target :
-       (ident | expr | L_FILE)
+       (file_name | expr) 
      ;
     
 copy_option :
@@ -615,7 +626,7 @@ select_list_item :
      ;
      
 select_list_alias :
-       {!_input.LT(1).getText().equalsIgnoreCase("FROM")}? T_AS? ident
+       {!_input.LT(1).getText().equalsIgnoreCase("INTO") && !_input.LT(1).getText().equalsIgnoreCase("FROM")}? T_AS? ident
      | T_OPEN_P T_TITLE L_S_STRING T_CLOSE_P
      ;
      
@@ -642,7 +653,7 @@ from_table_name_clause :
      ;     
 
 from_subselect_clause :
-       T_OPEN_P subselect_stmt T_CLOSE_P from_alias_clause?
+       T_OPEN_P select_stmt T_CLOSE_P from_alias_clause?
      ;
      
 from_join_clause :
@@ -669,7 +680,8 @@ from_alias_clause :
         !_input.LT(1).getText().equalsIgnoreCase("EXECUTE") && 
         !_input.LT(1).getText().equalsIgnoreCase("GROUP") &&
         !_input.LT(1).getText().equalsIgnoreCase("ORDER") &&
-        !_input.LT(1).getText().equalsIgnoreCase("LIMIT")}?
+        !_input.LT(1).getText().equalsIgnoreCase("LIMIT") &&
+        !_input.LT(1).getText().equalsIgnoreCase("WITH")}?
        T_AS? ident (T_OPEN_P L_ID (T_COMMA L_ID)* T_CLOSE_P)? 
      ;
      
@@ -699,7 +711,7 @@ select_options :
 
 select_options_item :
        T_LIMIT expr
-     | T_WITH (T_RR | T_RS | T_CS | T_UR)
+     | T_WITH (T_RR | T_RS | T_CS | T_UR) (T_USE T_AND T_KEEP (T_EXCLUSIVE | T_UPDATE | T_SHARE) T_LOCKS)?
      ;
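select_options_item now also accepts the DB2 lock-request clause after the isolation level, so a query such as the following would parse (table name is hypothetical):

  SELECT * FROM orders WITH RS USE AND KEEP EXCLUSIVE LOCKS;
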
 
 update_stmt :                              // UPDATE statement
@@ -738,7 +750,7 @@ delete_stmt :                             // DELETE statement
      ;
      
 bool_expr :                               // Boolean condition
-       T_OPEN_P bool_expr T_CLOSE_P 
+       T_NOT? T_OPEN_P bool_expr T_CLOSE_P 
      | bool_expr bool_expr_logical_operator bool_expr 
      | bool_expr_atom
      ;
@@ -900,6 +912,7 @@ expr_spec_func :
      | T_MIN_PART_INT T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P 
      | T_MAX_PART_DATE T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P 
      | T_MIN_PART_DATE T_OPEN_P expr (T_COMMA expr (T_COMMA expr T_EQUAL expr)*)? T_CLOSE_P 
+     | T_PART_COUNT T_OPEN_P expr (T_COMMA expr T_EQUAL expr)* T_CLOSE_P 
      | T_PART_LOC T_OPEN_P expr (T_COMMA expr T_EQUAL expr)+ (T_COMMA expr)? T_CLOSE_P 
      | T_TRIM T_OPEN_P expr T_CLOSE_P
      | T_SUBSTRING T_OPEN_P expr T_FROM expr (T_FOR expr)? T_CLOSE_P
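PART_COUNT joins the other partition helper functions; per the rule it takes a table expression followed by optional key = value pairs, for example (table and partition column names are hypothetical):

  PRINT PART_COUNT(web_logs, dt = '2015-09-01');
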
@@ -946,7 +959,7 @@ host_stmt :
      ;
      
 file_name :
-       L_ID | L_FILE
+       L_FILE | '/'? ident ('/' ident)*
      ;
      
 date_literal :                             // DATE 'YYYY-MM-DD' literal
@@ -1012,6 +1025,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_CLIENT     
      | T_CLOSE 
      | T_CLUSTERED
+     | T_CMP
      | T_COLLECTION     
      | T_COPY
      | T_COMMIT
@@ -1043,6 +1057,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_DEFERRED
      | T_DEFINED
      | T_DEFINER
+     | T_DEFINITION
      | T_DELETE
      | T_DELIMITED
      | T_DELIMITER
@@ -1051,7 +1066,8 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_DIAGNOSTICS
      | T_DISTINCT 
      | T_DISTRIBUTE
-     | T_DO         
+     | T_DO        
+     | T_DOUBLE     
      | T_DROP    
      | T_DYNAMIC      
      // T_ELSE reserved word         
@@ -1062,7 +1078,8 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_EXCEPT       
      | T_EXEC         
      | T_EXECUTE      
-     | T_EXCEPTION    
+     | T_EXCEPTION  
+     | T_EXCLUSIVE     
      | T_EXISTS
      | T_EXIT         
      | T_FETCH  
@@ -1085,6 +1102,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_HANDLER      
      | T_HASH
      | T_HAVING       
+     | T_HDFS
      | T_HIVE         
      | T_HOST    
      | T_IDENTITY     
@@ -1106,7 +1124,8 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_ITEMS     
      | T_IS    
      | T_ISOPEN
-     | T_JOIN     
+     | T_JOIN  
+     | T_KEEP     
      | T_KEY
      | T_KEYS
      | T_LAG
@@ -1121,6 +1140,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_LOCAL     
      | T_LOCATOR
      | T_LOCATORS
+     | T_LOCKS
      | T_LOGGED    
      | T_LOGGING     
      | T_LOOP    
@@ -1157,10 +1177,12 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_OVER
      | T_OVERWRITE
      | T_OWNER
+     | T_PART_COUNT
      | T_PART_LOC 
      | T_PARTITION  
      | T_PCTFREE
-     | T_PCTUSED     
+     | T_PCTUSED  
+     | T_PRECISION     
      | T_PRESERVE
      | T_PRIMARY
      | T_PRINT 
@@ -1181,7 +1203,8 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_REVERSE    
      | T_RIGHT
      | T_RLIKE
-     | T_RS     
+     | T_RS 
+     | T_ROLE     
      | T_ROLLBACK
      | T_ROW
      | T_ROWS
@@ -1194,6 +1217,7 @@ non_reserved_words :                      // Tokens that are not reserved words
      | T_SELECT       
      | T_SET 
      | T_SETS     
+     | T_SHARE
      | T_SIGNAL
      | T_SMALLDATETIME
      | T_SMALLINT     
@@ -1277,6 +1301,7 @@ T_CHARACTER       : C H A R A C T E R ;
 T_CLIENT          : C L I E N T ;
 T_CLOSE           : C L O S E ;
 T_CLUSTERED       : C L U S T E R E D;
+T_CMP             : C M P ; 
 T_COLLECTION      : C O L L E C T I O N ; 
 T_COPY            : C O P Y ;
 T_COMMIT          : C O M M I T ; 
@@ -1304,6 +1329,7 @@ T_DEFAULT         : D E F A U L T ;
 T_DEFERRED        : D E F E R R E D ; 
 T_DEFINED         : D E F I N E D ; 
 T_DEFINER         : D E F I N E R ;
+T_DEFINITION      : D E F I N I T I O N ; 
 T_DELETE          : D E L E T E ;
 T_DELIMITED       : D E L I M I T E D ; 
 T_DELIMITER       : D E L I M I T E R ; 
@@ -1312,6 +1338,7 @@ T_DIAGNOSTICS     : D I A G N O S T I C S ;
 T_DISTINCT        : D I S T I N C T ;
 T_DISTRIBUTE      : D I S T R I B U T E ;
 T_DO              : D O ;
+T_DOUBLE          : D O U B L E ;
 T_DROP            : D R O P ;
 T_DYNAMIC         : D Y N A M I C ; 
 T_ELSE            : E L S E ;
@@ -1323,6 +1350,7 @@ T_EXCEPT          : E X C E P T ;
 T_EXEC            : E X E C ;
 T_EXECUTE         : E X E C U T E ;
 T_EXCEPTION       : E X C E P T I O N ;
+T_EXCLUSIVE       : E X C L U S I V E ; 
 T_EXISTS          : E X I S T S ; 
 T_EXIT            : E X I T ;
 T_FETCH           : F E T C H ;
@@ -1344,6 +1372,7 @@ T_GROUP           : G R O U P ;
 T_HANDLER         : H A N D L E R ;
 T_HASH            : H A S H ;
 T_HAVING          : H A V I N G ;
+T_HDFS            : H D F S ; 
 T_HIVE            : H I V E ;
 T_HOST            : H O S T ;
 T_IDENTITY        : I D E N T I T Y ; 
@@ -1366,6 +1395,7 @@ T_IS              : I S ;
 T_ISOPEN          : I S O P E N ;
 T_ITEMS           : I T E M S ; 
 T_JOIN            : J O I N ;
+T_KEEP            : K E E P; 
 T_KEY             : K E Y ;
 T_KEYS            : K E Y S ;
 T_LANGUAGE        : L A N G U A G E ;
@@ -1377,6 +1407,7 @@ T_LINES           : L I N E S ;
 T_LOCAL           : L O C A L ;
 T_LOCATOR         : L O C A T O R ; 
 T_LOCATORS        : L O C A T O R S ; 
+T_LOCKS           : L O C K S ; 
 T_LOGGED          : L O G G E D ; 
 T_LOGGING         : L O G G I N G ; 
 T_LOOP            : L O O P ;
@@ -1416,6 +1447,7 @@ T_OWNER           : O W N E R ;
 T_PARTITION       : P A R T I T I O N ; 
 T_PCTFREE         : P C T F R E E ; 
 T_PCTUSED         : P C T U S E D ;
+T_PRECISION       : P R E C I S I O N ; 
 T_PRESERVE        : P R E S E R V E ; 
 T_PRIMARY         : P R I M A R Y ;
 T_PRINT           : P R I N T ; 
@@ -1434,6 +1466,7 @@ T_RETURNS         : R E T U R N S ;
 T_REVERSE         : R E V E R S E ;
 T_RIGHT           : R I G H T ;
 T_RLIKE           : R L I K E ;
+T_ROLE            : R O L E ;
 T_ROLLBACK        : R O L L B A C K ;
 T_ROW             : R O W ; 
 T_ROWS            : R O W S ; 
@@ -1449,6 +1482,7 @@ T_SEL             : S E L ;
 T_SELECT          : S E L E C T ; 
 T_SET             : S E T ;
 T_SETS            : S E T S;
+T_SHARE           : S H A R E ; 
 T_SIGNAL          : S I G N A L ;
 T_SMALLDATETIME   : S M A L L D A T E T I M E ;
 T_SMALLINT        : S M A L L I N T ;
@@ -1513,6 +1547,7 @@ T_MAX_PART_INT         : M A X '_' P A R T '_' I N T ;
 T_MIN_PART_INT         : M I N '_' P A R T '_' I N T ;
 T_MAX_PART_DATE        : M A X '_' P A R T '_' D A T E ;
 T_MIN_PART_DATE        : M I N '_' P A R T '_' D A T E ;
+T_PART_COUNT           : P A R T '_' C O U N T ; 
 T_PART_LOC             : P A R T '_' L O C ;
 T_RANK                 : R A N K ;
 T_ROW_NUMBER           : R O W '_' N U M B E R;
@@ -1566,8 +1601,7 @@ L_WS        : L_BLANK+ -> skip ;                                       // Whites
 L_M_COMMENT : '/*' .*? '*/' -> channel(HIDDEN) ;                       // Multiline comment
 L_S_COMMENT : ('--' | '//')  .*? '\r'? '\n' -> channel(HIDDEN) ;       // Single line comment
 
-L_FILE      : '/'? L_ID ('/' L_ID)*                                    // File path
-            | ([a-zA-Z] ':' '\\'?)? L_ID ('\\' L_ID)*
+L_FILE      : ([a-zA-Z] ':' '\\'?)? L_ID ('\\' L_ID)*                  // File path (a/b/c Linux paths conflict with the division operator and are handled at the parser level)
             ; 
 
 L_LABEL     : ([a-zA-Z] | L_DIGIT | '_')* ':'            

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java b/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java
new file mode 100644
index 0000000..ee65a88
--- /dev/null
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Cmp.java
@@ -0,0 +1,314 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.hplsql;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.util.ArrayList;
+
+import org.antlr.v4.runtime.ParserRuleContext;
+
+public class Cmp implements Runnable {
+  
+  Exec exec;
+  Timer timer = new Timer();  
+  boolean trace = false;
+  boolean info = false;
+  
+  Query query;
+  String conn;
+  HplsqlParser.Cmp_stmtContext ctx;
+  
+  int tests = 0;
+  int failedTests = 0;
+  int failedTestsHighDiff = 0;
+  
+  Cmp(Exec e) {
+    exec = e;  
+    trace = exec.getTrace();
+    info = exec.getInfo();
+  }
+  
+  Cmp(Exec e, HplsqlParser.Cmp_stmtContext c, Query q, String cn) {
+    exec = e;  
+    trace = exec.getTrace();
+    info = exec.getInfo();
+    ctx = c;
+    query = q;
+    conn = cn;
+  }
+  
+  /**
+   * Run CMP command
+   */
+  Integer run(HplsqlParser.Cmp_stmtContext ctx) {
+    trace(ctx, "CMP");
+    this.ctx = ctx;
+    timer.start();
+    StringBuilder conn1 = new StringBuilder();
+    StringBuilder conn2 = new StringBuilder();
+    Query query1 = new Query();
+    Query query2 = new Query();
+    Boolean equal = null;
+    try {
+      String sql1 = getSql(ctx, conn1, 0);
+      String sql2 = getSql(ctx, conn2, 1);
+      if (trace) {
+        trace(ctx, "Query 1: " + sql1);
+        trace(ctx, "Query 2: " + sql2);
+      }
+      query1.setSql(sql1);
+      query2.setSql(sql2);
+      Cmp cmp1 = new Cmp(exec, ctx, query1, conn1.toString());
+      Cmp cmp2 = new Cmp(exec, ctx, query2, conn2.toString());
+      Thread t1 = new Thread(cmp1);
+      Thread t2 = new Thread(cmp2);
+      t1.start();
+      t2.start();
+      t1.join();
+      t2.join();
+      equal = compare(query1, query2);      
+    }
+    catch(Exception e) {
+      exec.signal(e);
+      return -1;
+    }
+    finally {
+      long elapsed = timer.stop();
+      if (info) {
+        String message = "CMP ";
+        if (equal != null) {
+          if (equal) {
+            message += "Equal, " + tests + " tests";
+          }
+          else {
+            message += "Not Equal, " + failedTests + " of " + tests + " tests failed";
+            message += ", " + failedTestsHighDiff + " failed tests with more than 0.01% difference";
+          }
+        }
+        else {
+          message += "Failed";
+        }
+        info(ctx, message + ", " + timer.format());
+      }
+      exec.closeQuery(query1, conn1.toString());
+      exec.closeQuery(query2, conn2.toString());
+    }
+    return 0;
+  }
+  
+  /**
+   * Get data for comparison from the source
+   */
+  public void run() {
+    exec.executeQuery(ctx, query, conn);
+  }
+  
+  /**
+   * Compare the results
+   */
+  Boolean compare(Query query1, Query query2) {
+    if (query1.error()) { 
+      exec.signal(query1);
+      return null;
+    }
+    else if (query2.error()) { 
+      exec.signal(query2);
+      return null;
+    }
+    ResultSet rs1 = query1.getResultSet();
+    ResultSet rs2 = query2.getResultSet();
+    if (rs1 == null || rs2 == null) {
+      exec.setSqlCode(-1);
+      return null;
+    }
+    boolean equal = true;
+    tests = 0;
+    failedTests = 0;
+    try {
+      ResultSetMetaData rm1 = rs1.getMetaData();
+      ResultSetMetaData rm2 = rs2.getMetaData();
+      int cnt1 = rm1.getColumnCount();
+      int cnt2 = rm2.getColumnCount();
+      tests = cnt1;
+      while (rs1.next() && rs2.next()) {
+        for (int i = 1; i <= tests; i++) {
+          Var v1 = new Var(Var.Type.DERIVED_TYPE);
+          Var v2 = new Var(Var.Type.DERIVED_TYPE);
+          v1.setValue(rs1, rm1, i);
+          if (i <= cnt2) {
+            v2.setValue(rs2, rm2, i);
+          }
+          boolean e = true;
+          if (!(v1.isNull() && v2.isNull()) && !v1.equals(v2)) {
+            equal = false;
+            e = false;
+            failedTests++;
+          }
+          if (trace || info) {
+            String m = rm1.getColumnName(i) + "\t" + v1.toString() + "\t" + v2.toString();
+            if (!e) {
+              m += "\tNot equal";
+              BigDecimal diff = v1.percentDiff(v2);
+              if (diff != null) {
+                if (diff.compareTo(BigDecimal.ZERO) != 0) {
+                  m += ", " + diff + "% difference";
+                  failedTestsHighDiff++;
+                }
+                else {
+                  m += ", less than 0.01% difference";
+                }
+              }
+              else {
+                failedTestsHighDiff++;
+              }
+            }
+            if (trace) {
+              trace(null, m);
+            }
+            else {
+              info(null, m);
+            }
+          }
+        }
+        if (equal) {
+          exec.setSqlSuccess();
+        }
+        else {
+          exec.setSqlCode(1);
+        }
+      }
+    }
+    catch(Exception e) {
+      exec.signal(e);
+      return null;
+    }
+    return Boolean.valueOf(equal);
+  }
+  
+  /**
+   * Define the SQL query to access data
+   */
+  String getSql(HplsqlParser.Cmp_stmtContext ctx, StringBuilder conn, int idx) throws Exception {
+    StringBuilder sql = new StringBuilder();
+    String table = null;
+    String query = null;
+    if (ctx.cmp_source(idx).table_name() != null) {
+      table = evalPop(ctx.cmp_source(idx).table_name()).toString();
+    }
+    else {
+      query = evalPop(ctx.cmp_source(idx).select_stmt()).toString();
+    }
+    if (ctx.cmp_source(idx).T_AT() != null) {
+      conn.append(ctx.cmp_source(idx).ident().getText());
+    }
+    else if (table != null) {
+      conn.append(exec.getObjectConnection(ctx.cmp_source(idx).table_name().getText()));
+    }  
+    else {
+      conn.append(exec.getStatementConnection());
+    }
+    sql.append("SELECT ");
+    sql.append(getSelectList(ctx, conn.toString(), table, query));
+    sql.append(" FROM ");
+    if (table != null) {
+      sql.append(table);
+      if (ctx.cmp_source(idx).where_clause() != null) {
+        sql.append(" " + evalPop(ctx.cmp_source(idx).where_clause()).toString());
+      }
+    }
+    else {
+      sql.append("(");
+      sql.append(query);
+      sql.append(") t");
+    }
+    return sql.toString();
+  }
+  
+  /**
+   * Define the SELECT list to access data
+   */
+  String getSelectList(HplsqlParser.Cmp_stmtContext ctx, String conn, String table, String query) throws Exception {
+    StringBuilder sql = new StringBuilder();
+    sql.append("COUNT(1) AS row_count");
+    if (ctx.T_SUM() != null && table != null) {
+      Row row = exec.meta.getRowDataType(ctx, conn, table);
+      if (row != null) {
+        ArrayList<Column> cols = row.getColumns();
+        int cnt = row.size();
+        sql.append(",\n");
+        for (int i = 0; i < cnt; i++) {
+          Column col = cols.get(i);
+          String name = col.getName();
+          Var.Type type = Var.defineType(col.getType());
+          sql.append("COUNT(" + name + ") AS " + name + "_COUNT_NOT_NULL");
+          if (type == Var.Type.STRING) {
+            sql.append(",\n");
+            sql.append("SUM(LENGTH(" + name + ")) AS " + name + "_SUM_LENGTH,\n");
+            sql.append("MIN(LENGTH(" + name + ")) AS " + name + "_MIN_LENGTH,\n");
+            sql.append("MAX(LENGTH(" + name + ")) AS " + name + "_MAX_LENGTH");
+          }
+          else if (type == Var.Type.BIGINT || type == Var.Type.DECIMAL || type == Var.Type.DOUBLE) {
+            sql.append(",\n");
+            sql.append("SUM(" + name + ") AS " + name + "_SUM,\n");
+            sql.append("MIN(" + name + ") AS " + name + "_MIN,\n");
+            sql.append("MAX(" + name + ") AS " + name + "_MAX");
+          }
+          else if (type == Var.Type.DATE || type == Var.Type.TIMESTAMP) {
+            sql.append(",\n");
+            sql.append("SUM(YEAR(" + name + ")) AS " + name + "_SUM_YEAR,\n");
+            sql.append("SUM(MONTH(" + name + ")) AS " + name + "_SUM_MONTH,\n");
+            sql.append("SUM(DAY(" + name + ")) AS " + name + "_SUM_DAY,\n");
+            sql.append("MIN(" + name + ") AS " + name + "_MIN,\n");
+            sql.append("MAX(" + name + ") AS " + name + "_MAX");
+          }
+          if (i + 1 < cnt) {
+            sql.append(",\n");
+          }
+        }
+      }
+    }
+    return sql.toString();
+  }
+  
+  /**
+   * Evaluate the expression and pop value from the stack
+   */
+  Var evalPop(ParserRuleContext ctx) {
+    exec.visit(ctx);
+    if (!exec.stack.isEmpty()) { 
+      return exec.stackPop();
+    }
+    return Var.Empty;
+  }
+
+  /**
+   * Trace and informational output
+   */
+  public void trace(ParserRuleContext ctx, String message) {
+    exec.trace(ctx, message);
+  }
+  
+  public void info(ParserRuleContext ctx, String message) {
+    exec.info(ctx, message);
+  }
+}
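
The new Cmp class executes the two CMP sources on separate threads, waits for both, and then compares the fetched aggregates column by column, counting failed tests. A minimal standalone sketch of that fork/join pattern, using an ExecutorService in place of the raw Threads and Exec plumbing in the patch (the query runners and their results are illustrative only):

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    // Sketch of the CMP fork/join pattern: run both sources concurrently, wait
    // for both, then compare. The Callables stand in for Exec.executeQuery and
    // return the aggregates (e.g. COUNT(1)) fetched from each source.
    public class CmpSketch {
      public static void main(String[] args) throws Exception {
        Callable<long[]> source1 = () -> new long[] { 500L };   // e.g. row_count from source 1
        Callable<long[]> source2 = () -> new long[] { 500L };   // e.g. row_count from source 2
        ExecutorService pool = Executors.newFixedThreadPool(2);
        Future<long[]> f1 = pool.submit(source1);
        Future<long[]> f2 = pool.submit(source2);
        long[] r1 = f1.get();                                   // wait for both sources
        long[] r2 = f2.get();
        pool.shutdown();
        int failed = 0;
        for (int i = 0; i < r1.length; i++) {
          if (r1[i] != r2[i]) {
            failed++;
          }
        }
        System.out.println(failed == 0 ? "Equal, " + r1.length + " tests"
            : "Not Equal, " + failed + " of " + r1.length + " tests failed");
      }
    }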

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Column.java b/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
index 252a870..e4e914c 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Column.java
@@ -27,9 +27,36 @@ public class Column {
   String type;
   Var value;
   
+  int len;
+  int scale;
+  
   Column(String name, String type) {
     this.name = name;
-    this.type = type;
+    len = 0;
+    scale = 0;
+    setType(type);
+  }
+  
+  /**
+   * Set the column type with its length/precision
+   */
+  void setType(String type) {
+    int open = type.indexOf('(');
+    if (open == -1) {
+      this.type = type;
+    }
+    else {
+      this.type = type.substring(0, open);
+      int comma = type.indexOf(',', open);
+      int close = type.indexOf(')', open);
+      if (comma == -1) {
+        len = Integer.parseInt(type.substring(open + 1, close));
+      }
+      else {
+        len = Integer.parseInt(type.substring(open + 1, comma));
+        scale = Integer.parseInt(type.substring(comma + 1, close));
+      }
+    }
   }
   
   /**
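
Column.setType now splits a declared type such as DECIMAL(18,2) into its base name, length, and scale. A self-contained sketch of the same indexOf-based parsing (class and field names here are illustrative):

    // Sketch of the length/scale parsing Column.setType performs, e.g. "DECIMAL(18,2)".
    public class TypeSpec {
      String type;
      int len = 0;
      int scale = 0;

      TypeSpec(String decl) {
        int open = decl.indexOf('(');
        if (open == -1) {
          type = decl;                                               // e.g. "STRING"
          return;
        }
        type = decl.substring(0, open);                              // base type name
        int close = decl.indexOf(')', open);
        int comma = decl.indexOf(',', open);
        if (comma == -1) {
          len = Integer.parseInt(decl.substring(open + 1, close));   // e.g. VARCHAR(30)
        } else {
          len = Integer.parseInt(decl.substring(open + 1, comma));   // e.g. DECIMAL(18,2)
          scale = Integer.parseInt(decl.substring(comma + 1, close));
        }
      }

      public static void main(String[] args) {
        TypeSpec t = new TypeSpec("DECIMAL(18,2)");
        System.out.println(t.type + " len=" + t.len + " scale=" + t.scale);  // DECIMAL len=18 scale=2
      }
    }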

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java b/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
index c8cc910..12f43c9 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Conn.java
@@ -26,6 +26,7 @@ import java.sql.Connection;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.sql.ResultSet;
+import java.sql.PreparedStatement;
 
 public class Conn {
  
@@ -76,6 +77,25 @@ public class Conn {
   }
   
   /**
+   * Prepare a SQL query
+   */
+  public Query prepareQuery(Query query, String connName) {
+    try {
+      Connection conn = getConnection(connName);
+      timer.start();
+      PreparedStatement stmt = conn.prepareStatement(query.sql);
+      timer.stop();
+      query.set(conn, stmt);      
+      if (info) {
+        exec.info(null, "Statement prepared successfully (" + timer.format() + ")");
+      }      
+    } catch (Exception e) {
+      query.setError(e);
+    }
+    return query;
+  }
+  
+  /**
    * Execute a SQL statement
    */
   public Query executeSql(String sql, String connName) {
@@ -117,6 +137,7 @@ public class Conn {
     if (sqls != null) {
       Statement s = conn.createStatement();
       for (String sql : sqls) {
+        exec.info(null, "Starting pre-SQL statement");
         s.execute(sql);
       }
       s.close();

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
index 30b98ca..9968b24 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
@@ -26,6 +26,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.List;
 import java.io.FileOutputStream;
+import java.io.OutputStream;
 import java.io.IOException;
 
 import org.apache.hadoop.fs.FileSystem;
@@ -165,8 +166,8 @@ public class Copy {
     exec.returnConnection(targetConn, conn);
     exec.setRowCount(rows);
     long elapsed = timer.stop();
-    if (trace) {
-      trace(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", " + rows/(elapsed/1000) + " rows/sec");
+    if (info) {
+      info(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", " + rows/(elapsed/1000) + " rows/sec");
     }
   }
   
@@ -192,16 +193,35 @@ public class Copy {
     byte[] nullstr = "NULL".getBytes();
     int cols = rm.getColumnCount();
     int rows = 0;
-    if (trace) {
-      trace(ctx, "SELECT executed: " + cols + " columns, output file: " + filename);
+    if (trace || info) {
+      String mes = "Query executed: " + cols + " columns, output file: " + filename;
+      if (trace) {
+        trace(ctx, mes);
+      }
+      else {
+        info(ctx, mes);
+      }
     } 
-    java.io.File file = new java.io.File(filename);
-    FileOutputStream out = null;
+    java.io.File file = null;
+    File hdfsFile = null;
+    if (ctx.T_HDFS() == null) {
+      file = new java.io.File(filename);
+    }
+    else {
+      hdfsFile = new File();
+    }     
+    OutputStream out = null;
+    timer.start();
     try {      
-      if (!file.exists()) {
-        file.createNewFile();
+      if (file != null) {
+        if (!file.exists()) {
+          file.createNewFile();
+        }
+        out = new FileOutputStream(file, false /*append*/);
+      }
+      else {
+        out = hdfsFile.create(filename, true /*overwrite*/);
       }
-      out = new FileOutputStream(file, false /*append*/);
       String col;
       String sql = "";
       if (sqlInsert) {
@@ -237,8 +257,9 @@ public class Copy {
         out.close();
       }
     }
-    if (trace) {
-      trace(ctx, "COPY rows: " + rows);
+    long elapsed = timer.stop();
+    if (info) {
+      info(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", " + rows/(elapsed/1000) + " rows/sec");
     }
   }
   
@@ -376,7 +397,12 @@ public class Copy {
       }
       else if (option.T_AT() != null) {
         targetConn = option.ident().getText();
-        sqlInsertName = ctx.copy_target().ident().getText();
+        if (ctx.copy_target().expr() != null) {
+          sqlInsertName = evalPop(ctx.copy_target().expr()).toString();
+        }
+        else {
+          sqlInsertName = ctx.copy_target().getText();
+        }
       }
       else if (option.T_BATCHSIZE() != null) {
         batchSize = evalPop(option.expr()).intValue();
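
COPY can now write either to a local file or to HDFS, and both targets are driven through a common OutputStream. A hedged sketch of that choice, assuming hadoop-common on the classpath and a configured fs.defaultFS; the file names below are examples only:

    import java.io.FileOutputStream;
    import java.io.OutputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Sketch of the local-vs-HDFS output choice COPY makes: both targets are
    // written through a common OutputStream.
    public class CopyTargetSketch {
      static OutputStream open(String filename, boolean toHdfs) throws Exception {
        if (!toHdfs) {
          return new FileOutputStream(filename, false /*append*/);    // local file
        }
        FileSystem fs = FileSystem.get(new Configuration());          // default file system
        return fs.create(new Path(filename), true /*overwrite*/);     // HDFS file
      }

      public static void main(String[] args) throws Exception {
        try (OutputStream out = open("/tmp/src2.txt", false)) {
          out.write("100\tA\n".getBytes());
        }
      }
    }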

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
index f5592e1..38b5380 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
@@ -506,6 +506,24 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
   public Query executeQuery(ParserRuleContext ctx, String sql, String connProfile) {
     return executeQuery(ctx, new Query(sql), connProfile);
   }
+  
+  /**
+   * Prepare a SQL query (SELECT)
+   */
+  public Query prepareQuery(ParserRuleContext ctx, Query query, String connProfile) {
+    if (!exec.offline) {
+      exec.rowCount = 0;
+      exec.conn.prepareQuery(query, connProfile);
+      return query;
+    }
+    setSqlNoData();
+    info(ctx, "Not executed - offline mode set");
+    return query;
+  }
+
+  public Query prepareQuery(ParserRuleContext ctx, String sql, String connProfile) {
+    return prepareQuery(ctx, new Query(sql), connProfile);
+  }
 
   /**
    * Execute a SQL statement 
@@ -950,6 +968,11 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
   }
   
   @Override 
+  public Integer visitFrom_subselect_clause(HplsqlParser.From_subselect_clauseContext ctx) { 
+    return exec.select.fromSubselect(ctx); 
+  }
+  
+  @Override 
   public Integer visitFrom_join_clause(HplsqlParser.From_join_clauseContext ctx) { 
     return exec.select.fromJoin(ctx); 
   }
@@ -1162,6 +1185,14 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
   }
   
   /**
+   * CMP statement
+   */
+  @Override 
+  public Integer visitCmp_stmt(HplsqlParser.Cmp_stmtContext ctx) { 
+    return new Cmp(exec).run(ctx); 
+  }
+  
+  /**
    * COPY statement
    */
   @Override 
@@ -1926,8 +1957,13 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
    */
   @Override 
   public Integer visitDate_literal(HplsqlParser.Date_literalContext ctx) { 
-    String str = evalPop(ctx.string()).toString();
-    stackPush(new Var(Var.Type.DATE, Utils.toDate(str))); 
+    if (!exec.buildSql) {
+      String str = evalPop(ctx.string()).toString();
+      stackPush(new Var(Var.Type.DATE, Utils.toDate(str)));
+    }
+    else {
+      stackPush(getFormattedText(ctx));
+    }
     return 0; 
   }
 
@@ -1936,16 +1972,21 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
    */
   @Override 
   public Integer visitTimestamp_literal(HplsqlParser.Timestamp_literalContext ctx) { 
-    String str = evalPop(ctx.string()).toString();
-    int len = str.length();
-    int precision = 0;
-    if (len > 19 && len <= 29) {
-      precision = len - 20;
-      if (precision > 3) {
-        precision = 3;
+    if (!exec.buildSql) {
+      String str = evalPop(ctx.string()).toString();
+      int len = str.length();
+      int precision = 0;
+      if (len > 19 && len <= 29) {
+        precision = len - 20;
+        if (precision > 3) {
+          precision = 3;
+        }
       }
+      stackPush(new Var(Utils.toTimestamp(str), precision));
+    }
+    else {
+      stackPush(getFormattedText(ctx));
     }
-    stackPush(new Var(Utils.toTimestamp(str), precision)); 
     return 0; 
   }
   
@@ -1979,6 +2020,9 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
    * @throws Exception 
    */
   Connection getConnection(String conn) throws Exception {
+    if (conn == null || conn.equalsIgnoreCase("default")) {
+      conn = exec.conf.defaultConnection;
+    }
     return exec.conn.getConnection(conn);
   }
   
@@ -1993,7 +2037,7 @@ public class Exec extends HplsqlBaseVisitor<Integer> {
    * Define the database type by profile name
    */
   Conn.Type getConnectionType(String conn) {
-    return exec.conn.getType(conn);
+    return exec.conn.getTypeByProfile(conn);
   }
   
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java b/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
index 7269798..7c500a8 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Expression.java
@@ -49,6 +49,9 @@ public class Expression {
       else if (ctx.T_SUB() != null) {
         operatorSub(ctx); 
       }
+      else if (ctx.T_MUL() != null) {
+        operatorMultiply(ctx); 
+      }
       else if (ctx.T_DIV() != null) {
         operatorDiv(ctx); 
       }
@@ -98,16 +101,17 @@ public class Expression {
    * Evaluate a boolean expression
    */
   public void execBool(HplsqlParser.Bool_exprContext ctx) {
-    if (ctx.T_OPEN_P() != null) {
-      eval(ctx.bool_expr(0));
-      return;
-    }
-    else if (ctx.bool_expr_atom() != null) {
+    if (ctx.bool_expr_atom() != null) {
       eval(ctx.bool_expr_atom());
       return;
     }
     Var result = evalPop(ctx.bool_expr(0));
-    if (ctx.bool_expr_logical_operator() != null) {
+    if (ctx.T_OPEN_P() != null) {
+      if (ctx.T_NOT() != null) {
+        result.negate();
+      }
+    }
+    else if (ctx.bool_expr_logical_operator() != null) {
       if (ctx.bool_expr_logical_operator().T_AND() != null) {
         if (result.isTrue()) {
           result = evalPop(ctx.bool_expr(1));
@@ -359,6 +363,23 @@ public class Expression {
   }
   
   /**
+   * Multiplication operator
+   */
+  public void operatorMultiply(HplsqlParser.ExprContext ctx) {
+    Var v1 = evalPop(ctx.expr(0));
+    Var v2 = evalPop(ctx.expr(1));
+    if (v1.value == null || v2.value == null) {
+      evalNull();
+    }
+    else if (v1.type == Type.BIGINT && v2.type == Type.BIGINT) {
+      exec.stackPush(new Var((Long)v1.value * (Long)v2.value)); 
+    }
+    else {
+      exec.signal(Signal.Type.UNSUPPORTED_OPERATION, "Unsupported data types in multiplication operator");
+    }
+  }
+  
+  /**
    * Division operator
    */
   public void operatorDiv(HplsqlParser.ExprContext ctx) {
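
operatorMultiply follows the same pattern as the other arithmetic operators: NULL operands propagate, BIGINT values are computed directly, and any other type combination raises an unsupported-operation signal. A standalone sketch of that dispatch, with plain Long values standing in for Var:

    // Sketch of the operand handling in operatorMultiply: NULL propagation,
    // BIGINT * BIGINT computed directly, anything else rejected.
    public class MultiplySketch {
      static Object multiply(Object v1, Object v2) {
        if (v1 == null || v2 == null) {
          return null;                                  // NULL propagation
        }
        if (v1 instanceof Long && v2 instanceof Long) {
          return (Long) v1 * (Long) v2;                 // BIGINT * BIGINT
        }
        throw new UnsupportedOperationException("Unsupported data types in multiplication operator");
      }

      public static void main(String[] args) {
        System.out.println(multiply(6L, 7L));           // 42
        System.out.println(multiply(6L, null));         // null
      }
    }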

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/File.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/File.java b/hplsql/src/main/java/org/apache/hive/hplsql/File.java
index 6a8ddfe..e748772 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/File.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/File.java
@@ -46,17 +46,27 @@ public class File {
   /**
    * Create a file
    */
-  public void create(String dir, String file, boolean overwrite) {
-    path = new Path(dir, file);
+  public FSDataOutputStream create(boolean overwrite) {
     try {
       if (fs == null) {
-        fs = FileSystem.get(new Configuration());
+        fs = createFs();
       }
       out = fs.create(path, overwrite);
     } 
     catch (IOException e) {
       e.printStackTrace();
     }
+    return out;
+  }
+  
+  public FSDataOutputStream create(String dir, String file, boolean overwrite) {
+    path = new Path(dir, file);
+    return create(overwrite);
+  }
+
+  public FSDataOutputStream create(String file, boolean overwrite) {
+    path = new Path(file);
+    return create(overwrite);
   }
   
   /**
@@ -66,7 +76,7 @@ public class File {
    path = new Path(dir, file);
    try {
      if (fs == null) {
-       fs = FileSystem.get(new Configuration());
+       fs = createFs();
      }
      in = fs.open(path);
    } catch (IOException e) {

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java b/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
index 485bcdf..2e04ef9 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Meta.java
@@ -18,7 +18,9 @@
 
 package org.apache.hive.hplsql;
 
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
 import java.util.ArrayList;
 import java.util.HashMap;
 
@@ -90,12 +92,9 @@ public class Meta {
    */
   Row readColumns(ParserRuleContext ctx, String conn, String table, HashMap<String, Row> map) {
     Row row = null;
-    String sql = null;
     Conn.Type connType = exec.getConnectionType(conn); 
     if (connType == Conn.Type.HIVE) {
-      sql = "DESCRIBE " + table;
-    }
-    if (sql != null) {
+      String sql = "DESCRIBE " + table;
       Query query = new Query(sql);
       exec.executeQuery(ctx, query, conn); 
       if (!query.error()) {
@@ -115,6 +114,27 @@ public class Meta {
       }
       exec.closeQuery(query, conn);
     }
+    else {
+      Query query = exec.prepareQuery(ctx, "SELECT * FROM " + table, conn); 
+      if (!query.error()) {
+        try {
+          PreparedStatement stmt = query.getPreparedStatement();
+          ResultSetMetaData rm = stmt.getMetaData();
+          int cols = rm.getColumnCount();
+          for (int i = 1; i <= cols; i++) {
+            String col = rm.getColumnName(i);
+            String typ = rm.getColumnTypeName(i);
+            if (row == null) {
+              row = new Row();
+            }
+            row.addColumn(col.toUpperCase(), typ);
+          }
+          map.put(table, row);
+        }
+        catch (Exception e) {}
+      }
+      exec.closeQuery(query, conn);
+    }
     return row;
   }
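
For non-Hive connections, readColumns now prepares SELECT * FROM <table> and takes column names and types from the statement metadata instead of running DESCRIBE. A self-contained JDBC sketch of that approach; the connection URL and table name are placeholders, and some drivers return null metadata before execution:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSetMetaData;

    // Sketch of reading column metadata from a prepared (not executed) statement.
    public class ColumnMetaSketch {
      public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:db2://host:50000/sample", "user", "pwd");
             PreparedStatement stmt = conn.prepareStatement("SELECT * FROM sproc_info")) {
          ResultSetMetaData rm = stmt.getMetaData();     // may be null for some drivers
          if (rm != null) {
            for (int i = 1; i <= rm.getColumnCount(); i++) {
              System.out.println(rm.getColumnName(i).toUpperCase() + " " + rm.getColumnTypeName(i));
            }
          }
        }
      }
    }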
   

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Query.java b/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
index 08cd6a7..e196f86 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Query.java
@@ -21,6 +21,7 @@ package org.apache.hive.hplsql;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.PreparedStatement;
 import java.sql.Statement;
 
 import org.antlr.v4.runtime.ParserRuleContext;
@@ -32,6 +33,7 @@ public class Query {
   
   Connection conn;
   Statement stmt;
+  PreparedStatement pstmt;
   ResultSet rs;
   Exception exception;
 
@@ -59,6 +61,11 @@ public class Query {
     }
   }
   
+  public void set(Connection conn, PreparedStatement pstmt) {
+    this.conn = conn;
+    this.pstmt = pstmt;
+  }
+  
   /**
    * Set the fetch status
    */
@@ -132,6 +139,10 @@ public class Query {
         stmt.close();
         stmt = null;
       }
+      if(pstmt != null) {
+        pstmt.close();
+        pstmt = null;
+      }
       state = State.CLOSE;
     } catch (SQLException e) {
       e.printStackTrace();
@@ -190,6 +201,13 @@ public class Query {
   }
   
   /**
+   * Get the prepared statement object
+   */
+  public PreparedStatement getPreparedStatement() {
+    return pstmt;
+  }
+  
+  /**
    * Get the connection object
    */
   public Connection getConnection() {

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Select.java b/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
index 71ca848..56fbb05 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Select.java
@@ -218,7 +218,10 @@ public class Select {
       sql.append(" " + getText(ctx.order_by_clause()));
     }
     if (ctx.select_options() != null) {
-      sql.append(" " + evalPop(ctx.select_options()));
+      Var opt = evalPop(ctx.select_options());
+      if (!opt.isNull()) {
+        sql.append(" " + opt.toString());
+      }
     }
     if (ctx.select_list().select_list_limit() != null) {
       sql.append(" LIMIT " + evalPop(ctx.select_list().select_list_limit().expr()));
@@ -281,6 +284,21 @@ public class Select {
     exec.stackPush(sql);
     return 0; 
   }
+  
+  /**
+   * Subselect in FROM
+   */
+  public Integer fromSubselect(HplsqlParser.From_subselect_clauseContext ctx) {     
+    StringBuilder sql = new StringBuilder();
+    sql.append("(");
+    sql.append(evalPop(ctx.select_stmt()).toString());
+    sql.append(")");
+    if (ctx.from_alias_clause() != null) {
+      sql.append(" ").append(exec.getText(ctx.from_alias_clause()));
+    }
+    exec.stackPush(sql);
+    return 0; 
+  }
  
   /**
    * JOIN clause in FROM
@@ -341,10 +359,13 @@ public class Select {
    * WHERE clause
    */
   public Integer where(HplsqlParser.Where_clauseContext ctx) { 
+    boolean oldBuildSql = exec.buildSql; 
+    exec.buildSql = true;
     StringBuilder sql = new StringBuilder();
     sql.append(ctx.T_WHERE().getText());
     sql.append(" " + evalPop(ctx.bool_expr()));
     exec.stackPush(sql);
+    exec.buildSql = oldBuildSql;
     return 0;
   }
   

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java b/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
index 6193f49..db9ea65 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Stmt.java
@@ -435,7 +435,13 @@ public class Stmt {
    * INCLUDE statement
    */
   public Integer include(HplsqlParser.Include_stmtContext ctx) {
-    String file = ctx.file_name().getText();
+    String file;
+    if (ctx.file_name() != null) {
+      file = ctx.file_name().getText();
+    }
+    else {
+      file = evalPop(ctx.expr()).toString();
+    }    
     trace(ctx, "INCLUDE " + file);
     exec.includeFile(file);
     return 0; 

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Var.java b/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
index b31a14d..150e8b4 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Var.java
@@ -19,6 +19,7 @@
 package org.apache.hive.hplsql;
 
 import java.math.BigDecimal;
+import java.math.RoundingMode;
 import java.util.ArrayList;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
@@ -32,7 +33,7 @@ import java.sql.Timestamp;
 public class Var {
 
 	// Data types
-	public enum Type {BOOL, CURSOR, DATE, DECIMAL, DERIVED_TYPE, DERIVED_ROWTYPE, FILE, IDENT, BIGINT, INTERVAL, ROW, 
+	public enum Type {BOOL, CURSOR, DATE, DECIMAL, DERIVED_TYPE, DERIVED_ROWTYPE, DOUBLE, FILE, IDENT, BIGINT, INTERVAL, ROW, 
 	                  RS_LOCATOR, STRING, STRINGLIST, TIMESTAMP, NULL};
 	public static final String DERIVED_TYPE = "DERIVED%TYPE";
 	public static final String DERIVED_ROWTYPE = "DERIVED%ROWTYPE";
@@ -79,6 +80,11 @@ public class Var {
     this.value = value;
   }
   
+	public Var(Double value) {
+    this.type = Type.DOUBLE;
+    this.value = value;
+  }
+	
 	public Var(Date value) {
     this.type = Type.DATE;
     this.value = value;
@@ -169,6 +175,9 @@ public class Var {
 	    if (val.type == Type.BIGINT) {
 	      value = BigDecimal.valueOf(val.longValue());
 	    }
+	    else if (val.type == Type.DOUBLE) {
+	      value = BigDecimal.valueOf(val.doubleValue());
+	    }
 	  }
 	  else if (type == Type.DATE) {
 	    value = Utils.toDate(val.toString());
@@ -238,6 +247,9 @@ public class Var {
     else if (type == java.sql.Types.DECIMAL || type == java.sql.Types.NUMERIC) {
       cast(new Var(rs.getBigDecimal(idx)));
     }
+    else if (type == java.sql.Types.FLOAT || type == java.sql.Types.DOUBLE) {
+      cast(new Var(new Double(rs.getDouble(idx))));
+    }
     return this;
   }
   
@@ -287,6 +299,9 @@ public class Var {
     else if (type.equalsIgnoreCase("DEC") || type.equalsIgnoreCase("DECIMAL") || type.equalsIgnoreCase("NUMERIC")) {
       return Type.DECIMAL;
     }
+    else if (type.equalsIgnoreCase("FLOAT") || type.toUpperCase().startsWith("DOUBLE")) {
+      return Type.DOUBLE;
+    }
     else if (type.equalsIgnoreCase("DATE")) {
       return Type.DATE;
     }
@@ -332,34 +347,59 @@ public class Var {
     scale = 0;
 	}
 	
-	/*
+	/**
 	 * Compare values
 	 */
 	@Override
   public boolean equals(Object obj) {
-	  if (this == obj) {
+	  if (obj == null || getClass() != obj.getClass()) {
+      return false;
+    }    
+	  Var var = (Var)obj;  
+	  if (this == var) {
       return true;
 	  }
-	  else if (obj == null || this.value == null) {
+	  else if (var == null || var.value == null || this.value == null) {
       return false;
     }
-	  else if (getClass() != obj.getClass()) {
-      return false;
-	  }
-	  
-    Var var = (Var)obj;    
-    if (type == Type.BIGINT && var.type == Type.BIGINT &&
-       ((Long)value).longValue() == ((Long)var.value).longValue()) {
-      return true;
+    if (type == Type.BIGINT) {
+      if (var.type == Type.BIGINT && ((Long)value).longValue() == ((Long)var.value).longValue()) {
+        return true;
+      }
+      else if (var.type == Type.DECIMAL) {
+        return equals((BigDecimal)var.value, (Long)value);
+      }
     }
     else if (type == Type.STRING && var.type == Type.STRING &&
             ((String)value).equals((String)var.value)) {
       return true;
     }
+    else if (type == Type.DECIMAL && var.type == Type.DECIMAL &&
+            ((BigDecimal)value).compareTo((BigDecimal)var.value) == 0) {
+      return true;
+    }
+    else if (type == Type.DOUBLE) {
+      if (var.type == Type.DOUBLE && ((Double)value).compareTo((Double)var.value) == 0) {
+        return true;
+      }
+      else if (var.type == Type.DECIMAL && ((Double)value).compareTo(((BigDecimal)var.value).doubleValue()) == 0) {
+        return true;
+      }
+    }
     return false;
 	}
+    
+  /**
+   * Check if variables of different data types are equal
+   */
+  public boolean equals(BigDecimal d, Long i) {
+    if (d.compareTo(new BigDecimal(i)) == 0) {
+      return true;
+    }
+    return false;
+  }
 	
-	/*
+	/**
    * Compare values
    */
   public int compareTo(Var v) {
@@ -377,6 +417,20 @@ public class Var {
     }
     return -1;
   }
+  
+  /**
+   * Calculate difference between values in percent
+   */
+  public BigDecimal percentDiff(Var var) {
+    BigDecimal d1 = new Var(Var.Type.DECIMAL).cast(this).decimalValue();
+    BigDecimal d2 = new Var(Var.Type.DECIMAL).cast(var).decimalValue();
+    if (d1 != null && d2 != null) {
+      if (d1.compareTo(BigDecimal.ZERO) != 0) {
+        return d1.subtract(d2).abs().multiply(new BigDecimal(100)).divide(d1, 2, RoundingMode.HALF_UP);
+      }
+    }
+    return null;
+  }
 	
 	 /**
    * Increment an integer value
@@ -417,6 +471,26 @@ public class Var {
     }
     return -1;
   }
+  
+  /**
+   * Return a decimal value
+   */
+  public BigDecimal decimalValue() {
+    if (type == Type.DECIMAL) {
+      return (BigDecimal)value;
+    }
+    return null;
+  }
+  
+  /**
+   * Return a double value
+   */
+  public double doubleValue() {
+    if (type == Type.DOUBLE) {
+      return ((Double)value).doubleValue();
+    }
+    return -1;
+  }
 	
 	/**
 	 * Return true/false for BOOL type
@@ -429,6 +503,16 @@ public class Var {
 	}
 	
 	/**
+	 * Negate the boolean value
+	 */
+	public void negate() {
+    if(type == Type.BOOL && value != null) {
+      boolean v = ((Boolean)value).booleanValue();
+      value = Boolean.valueOf(!v);
+    }
+  }
+	
+	/**
 	 * Check if the variable contains NULL
 	 */
 	public boolean isNull() {
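
percentDiff reports the relative difference between two values as |d1 - d2| * 100 / d1, rounded to two decimal places, and returns null when the base value is zero or either operand is missing. A minimal sketch of the calculation:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    // Sketch of the percent-difference calculation used by CMP to flag mismatches
    // above the 0.01% threshold.
    public class PercentDiffSketch {
      static BigDecimal percentDiff(BigDecimal d1, BigDecimal d2) {
        if (d1 == null || d2 == null || d1.compareTo(BigDecimal.ZERO) == 0) {
          return null;                                   // undefined when the base value is 0 or missing
        }
        return d1.subtract(d2).abs()
                 .multiply(new BigDecimal(100))
                 .divide(d1, 2, RoundingMode.HALF_UP);
      }

      public static void main(String[] args) {
        System.out.println(percentDiff(new BigDecimal("1000"), new BigDecimal("999.5")));  // 0.05
      }
    }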

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
index ae7acae..aa40a0a 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/Function.java
@@ -88,7 +88,7 @@ public class Function {
     if (trace && ctx.parent.parent instanceof HplsqlParser.Expr_stmtContext) {
       trace(ctx, "FUNC " + name);      
     }
-    FuncCommand func = map.get(name);    
+    FuncCommand func = map.get(name.toUpperCase());    
     if (func != null) {
       func.run(ctx);
     }    
@@ -693,6 +693,10 @@ public class Function {
     exec.stackPush(new Var(i)); 
   }
   
+  void evalInt(int i) {
+    evalInt(new Long(i));
+  }
+  
   /**
    * Evaluate the expression to specified Date value
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
----------------------------------------------------------------------
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
index e022024..091552f 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/functions/FunctionMisc.java
@@ -18,6 +18,12 @@
 
 package org.apache.hive.hplsql.functions;
 
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.hive.hplsql.*;
 
 public class FunctionMisc extends Function {
@@ -34,11 +40,13 @@ public class FunctionMisc extends Function {
     f.map.put("DECODE", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { decode(ctx); }});
     f.map.put("NVL", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl(ctx); }});
     f.map.put("NVL2", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { nvl2(ctx); }});
+    f.map.put("PART_COUNT_BY", new FuncCommand() { public void run(HplsqlParser.Expr_func_paramsContext ctx) { partCountBy(ctx); }});
     
     f.specMap.put("ACTIVITY_COUNT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { activityCount(ctx); }});
     f.specMap.put("CAST", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { cast(ctx); }});
     f.specMap.put("CURRENT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { current(ctx); }});
     f.specMap.put("CURRENT_USER", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }});
+    f.specMap.put("PART_COUNT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { partCount(ctx); }});
     f.specMap.put("USER", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentUser(ctx); }});
 
     f.specSqlMap.put("CURRENT", new FuncSpecCommand() { public void run(HplsqlParser.Expr_spec_funcContext ctx) { currentSql(ctx); }});
@@ -185,4 +193,117 @@ public class FunctionMisc extends Function {
       evalNull();
     }
   }
+  
+  /**
+   * PART_COUNT function
+   */
+  public void partCount(HplsqlParser.Expr_spec_funcContext ctx) {
+    String tabname = evalPop(ctx.expr(0)).toString();
+    StringBuilder sql = new StringBuilder();
+    sql.append("SHOW PARTITIONS ");
+    sql.append(tabname);    
+    int cnt = ctx.expr().size();   
+    if (cnt > 1) {
+      sql.append(" PARTITION (");
+      int i = 1;
+      while (i + 1 < cnt) {
+        String col = evalPop(ctx.expr(i)).toString();
+        String val = evalPop(ctx.expr(i + 1)).toSqlString();
+        if (i > 2) {
+          sql.append(", ");
+        }
+        sql.append(col);
+        sql.append("=");
+        sql.append(val);        
+        i += 2;
+      }
+      sql.append(")");
+    }
+    if (trace) {
+      trace(ctx, "Query: " + sql);
+    }
+    if (exec.getOffline()) {
+      evalNull();
+      return;
+    }
+    Query query = exec.executeQuery(ctx, sql.toString(), exec.conf.defaultConnection);
+    if (query.error()) {
+      evalNullClose(query, exec.conf.defaultConnection);
+      return;
+    }
+    int result = 0;
+    ResultSet rs = query.getResultSet();
+    try {
+      while (rs.next()) {
+        result++;
+      }
+    } catch (SQLException e) {
+      evalNullClose(query, exec.conf.defaultConnection);
+      return;
+    }
+    evalInt(result);
+    exec.closeQuery(query, exec.conf.defaultConnection);
+  }
+  
+  /**
+   * PART_COUNT_BY function
+   */
+  public void partCountBy(HplsqlParser.Expr_func_paramsContext ctx) {
+    int cnt = ctx.func_param().size();
+    if (cnt < 1 || exec.getOffline()) {
+      return;
+    }
+    String tabname = evalPop(ctx.func_param(0).expr()).toString();
+    ArrayList<String> keys = null;
+    if (cnt > 1) {
+      keys = new ArrayList<String>();
+      for (int i = 1; i < cnt; i++) {
+        keys.add(evalPop(ctx.func_param(i).expr()).toString().toUpperCase());
+      }
+    }    
+    String sql = "SHOW PARTITIONS " + tabname;
+    Query query = exec.executeQuery(ctx, sql, exec.conf.defaultConnection);
+    if (query.error()) {
+      exec.closeQuery(query, exec.conf.defaultConnection);
+      return;
+    }
+    ResultSet rs = query.getResultSet();
+    HashMap<String, Integer> group = new HashMap<String, Integer>();
+    try {
+      while (rs.next()) {
+        String part = rs.getString(1);
+        String[] parts = part.split("/");
+        String key = parts[0];
+        if (cnt > 1) {
+          StringBuilder k = new StringBuilder();
+          for (int i = 0; i < parts.length; i++) {
+            if (keys.contains(parts[i].split("=")[0].toUpperCase())) {
+              if (k.length() > 0) {
+                k.append("/");
+              }
+              k.append(parts[i]);
+            }
+          }
+          key = k.toString();
+        }
+        Integer count = group.get(key);
+        if (count == null) {
+          count = new Integer(0); 
+        }
+        group.put(key, count + 1);        
+      }
+    } catch (SQLException e) {
+      exec.closeQuery(query, exec.conf.defaultConnection);
+      return;
+    }
+    if (cnt == 1) {
+      evalInt(group.size());
+    }
+    else {
+      for (Map.Entry<String, Integer> i : group.entrySet()) {
+        System.out.println(i.getKey() + '\t' + i.getValue());
+      }
+    }
+    exec.closeQuery(query, exec.conf.defaultConnection);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
----------------------------------------------------------------------
diff --git a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
index 6a67cd0..8299828 100644
--- a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
+++ b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlLocal.java
@@ -94,6 +94,10 @@ public class TestHplsqlLocal {
   }
   
   @Test
+  public void testCreateProcedure2() throws Exception {
+    run("create_procedure2");
+  }
+  @Test
   public void testCreateProcedureNoParams() throws Exception {
     run("create_procedure_no_params");
   }
@@ -162,8 +166,17 @@ public class TestHplsqlLocal {
   public void testIf() throws Exception {
     run("if");
   }
+  
+  @Test
+  public void testIf2() throws Exception {
+    run("if2");
+  }
 
   @Test
+  public void testInclude() throws Exception {
+    run("include");
+  }
+  @Test
   public void testInstr() throws Exception {
     run("instr");
   }
@@ -199,6 +212,11 @@ public class TestHplsqlLocal {
   }
 
   @Test
+  public void testMultDiv() throws Exception {
+    run("mult_div");
+  }
+
+  @Test
   public void testNvl() throws Exception {
     run("nvl");
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
----------------------------------------------------------------------
diff --git a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
index eeaa395..55238ed 100644
--- a/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
+++ b/hplsql/src/test/java/org/apache/hive/hplsql/TestHplsqlOffline.java
@@ -43,6 +43,11 @@ public class TestHplsqlOffline {
     run("create_table_ora");
   }
   
+  @Test
+  public void testSelectDb2() throws Exception {
+    run("select_db2");
+  }
+
   /**
    * Run a test file
    */

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/cmp_row_count.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/cmp_row_count.sql b/hplsql/src/test/queries/db/cmp_row_count.sql
new file mode 100644
index 0000000..b33d841
--- /dev/null
+++ b/hplsql/src/test/queries/db/cmp_row_count.sql
@@ -0,0 +1,4 @@
+cmp row_count src, src at hive2conn;
+cmp row_count src where 1=1, src at hive2conn;
+cmp row_count (select 'A' from src), src where 2=2 at hive2conn;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/cmp_sum.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/cmp_sum.sql b/hplsql/src/test/queries/db/cmp_sum.sql
new file mode 100644
index 0000000..32347e1
--- /dev/null
+++ b/hplsql/src/test/queries/db/cmp_sum.sql
@@ -0,0 +1,3 @@
+cmp sum src_dt, src_dt at hive2conn;
+cmp sum src_dt where 1=1, src_dt at hive2conn;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/copy_to_file.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/copy_to_file.sql b/hplsql/src/test/queries/db/copy_to_file.sql
new file mode 100644
index 0000000..6135471
--- /dev/null
+++ b/hplsql/src/test/queries/db/copy_to_file.sql
@@ -0,0 +1,2 @@
+copy src to target/tmp/src.txt;
+copy (select * from src) to target/tmp/src2.txt sqlinsert src2;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/copy_to_hdfs.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/copy_to_hdfs.sql b/hplsql/src/test/queries/db/copy_to_hdfs.sql
new file mode 100644
index 0000000..fd01d7b
--- /dev/null
+++ b/hplsql/src/test/queries/db/copy_to_hdfs.sql
@@ -0,0 +1,2 @@
+--copy src to hdfs src.txt;
+copy (select * from src) to hdfs /user/hplsql/src2.txt delimiter '\01';

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/copy_to_table.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/copy_to_table.sql b/hplsql/src/test/queries/db/copy_to_table.sql
new file mode 100644
index 0000000..674c0fc
--- /dev/null
+++ b/hplsql/src/test/queries/db/copy_to_table.sql
@@ -0,0 +1,2 @@
+copy src to src2 at mysqlconn;
+copy (select * from src) to src2 at mysqlconn;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/part_count.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/part_count.sql b/hplsql/src/test/queries/db/part_count.sql
new file mode 100644
index 0000000..9d62c38
--- /dev/null
+++ b/hplsql/src/test/queries/db/part_count.sql
@@ -0,0 +1,17 @@
+if part_count(partition_date_1) = 5 then
+  print 'success';
+else 
+  print 'failed';
+end if;  
+
+if part_count(partition_date_1, region='1') = 2 then
+  print 'success';
+else 
+  print 'failed';
+end if;  
+
+if part_count(partition_date_1a) is null then    -- table does not exist  
+  print 'success';
+else 
+  print 'failed';
+end if;  

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/part_count_by.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/part_count_by.sql b/hplsql/src/test/queries/db/part_count_by.sql
new file mode 100644
index 0000000..599dc5b
--- /dev/null
+++ b/hplsql/src/test/queries/db/part_count_by.sql
@@ -0,0 +1,4 @@
+part_count_by(partition_date_1); 
+part_count_by(partition_date_1, dt); 
+part_count_by(partition_date_1, dt, region); 
+part_count_by(partition_date_1, region); 
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/schema.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/schema.sql b/hplsql/src/test/queries/db/schema.sql
new file mode 100644
index 0000000..0c41569
--- /dev/null
+++ b/hplsql/src/test/queries/db/schema.sql
@@ -0,0 +1,32 @@
+drop table if exists src_dt;
+
+create table src_dt (
+  c1  string,
+  c2  varchar(30),
+  c3  char(30),
+  c4  tinyint,
+  c5  smallint,
+  c6  int,
+  c7  bigint,
+  c8  decimal(19,4),
+  c9  float,
+  c10 double,
+  c11 date,
+  c12 timestamp
+);
+
+insert overwrite table src_dt
+select 
+  value c1,
+  value c2,
+  value c3,
+  cast(key as tinyint) c4,
+  cast(key as smallint) c5,
+  cast(key as int) c6,
+  cast(key as bigint) c7,
+  cast(key as decimal)/10 c8,
+  cast(key as float)/10 c9,
+  cast(key as double)/10 c10,
+  date '2015-09-07' c11,
+  cast(date '2015-09-07' as timestamp) c12
+from src;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/select_into.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/select_into.sql b/hplsql/src/test/queries/db/select_into.sql
index 3995ba2..1da610a 100644
--- a/hplsql/src/test/queries/db/select_into.sql
+++ b/hplsql/src/test/queries/db/select_into.sql
@@ -1,17 +1,33 @@
+DECLARE v_bint BIGINT;
 DECLARE v_int INT;
+DECLARE v_sint SMALLINT;
+DECLARE v_tint TINYINT;
 DECLARE v_dec DECIMAL(18,2);
 DECLARE v_dec0 DECIMAL(18,0);
+DECLARE v_str STRING;
 
 SELECT TOP 1 
+  CAST(1 AS BIGINT),
   CAST(1 AS INT), 
+  CAST(1 AS SMALLINT), 
+  CAST(1 AS TINYINT), 
   CAST(1.1 AS DECIMAL(18,2)),
   CAST(1.1 AS DECIMAL(18,0))   
 INTO 
+  v_bint,
   v_int,
+  v_sint,
+  v_tint,
   v_dec,
   v_dec0  
-FROM src ;
+FROM src;
         
+PRINT 'BIGINT: ' || v_bint;
 PRINT 'INT: ' || v_int;
+PRINT 'SMALLINT: ' || v_sint;
+PRINT 'TINYINT: ' || v_tint;
 PRINT 'DECIMAL: ' || v_dec;
-PRINT 'DECIMAL0: ' || v_dec0;
\ No newline at end of file
+PRINT 'DECIMAL0: ' || v_dec0;
+
+select 'a' into v_str from src limit 1;
+print 'string: ' || v_str;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/db/select_into2.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/db/select_into2.sql b/hplsql/src/test/queries/db/select_into2.sql
new file mode 100644
index 0000000..e0f738c
--- /dev/null
+++ b/hplsql/src/test/queries/db/select_into2.sql
@@ -0,0 +1,17 @@
+declare v_float float;
+declare v_double double;
+declare v_double2 double precision;
+
+select
+  cast(1.1 as float),
+  cast(1.1 as double),
+  cast(1.1 as double)  
+into
+  v_float,
+  v_double,
+  v_double2
+from src limit 1;
+        
+print 'float: ' || v_float;
+print 'double: ' || v_double;
+print 'double precision: ' || v_double2;

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/create_procedure2.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/create_procedure2.sql b/hplsql/src/test/queries/local/create_procedure2.sql
new file mode 100644
index 0000000..8875c6a
--- /dev/null
+++ b/hplsql/src/test/queries/local/create_procedure2.sql
@@ -0,0 +1,16 @@
+CREATE PROCEDURE set_message(IN name STRING, OUT result STRING)
+BEGIN
+  DECLARE str STRING DEFAULT 'Hello, ' || name || '!';
+  Work: begin
+    declare continue handler for sqlexception begin
+      set result = null;
+      print 'error';
+    end;
+    set result = str;
+  end;
+END;
+ 
+DECLARE str STRING;
+CALL set_message('world', str);
+PRINT str;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/if2.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/if2.sql b/hplsql/src/test/queries/local/if2.sql
new file mode 100644
index 0000000..b645b86
--- /dev/null
+++ b/hplsql/src/test/queries/local/if2.sql
@@ -0,0 +1,5 @@
+if not (coalesce(1,0) between 3 and 5) then
+  print 'correct';
+else 
+  print 'failed';
+end if;

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/include.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/include.sql b/hplsql/src/test/queries/local/include.sql
new file mode 100644
index 0000000..c1dfb96
--- /dev/null
+++ b/hplsql/src/test/queries/local/include.sql
@@ -0,0 +1,2 @@
+include src/test/queries/local/include_file.sql
+include 'src/test/queries/local/include_file' || '.sql'
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/include_file.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/include_file.sql b/hplsql/src/test/queries/local/include_file.sql
new file mode 100644
index 0000000..ac5e0f0
--- /dev/null
+++ b/hplsql/src/test/queries/local/include_file.sql
@@ -0,0 +1 @@
+print 'file included successfully';
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/local/mult_div.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/local/mult_div.sql b/hplsql/src/test/queries/local/mult_div.sql
new file mode 100644
index 0000000..ebad8f4
--- /dev/null
+++ b/hplsql/src/test/queries/local/mult_div.sql
@@ -0,0 +1,8 @@
+declare a int default 8;
+declare b int default 4;
+declare c int default 2;
+
+print a/b/c;
+
+set a = 4 * 2 / cast(4 as int) /2;
+set b = 4 * 2 /cast(4 as int)/2;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/queries/offline/select_db2.sql
----------------------------------------------------------------------
diff --git a/hplsql/src/test/queries/offline/select_db2.sql b/hplsql/src/test/queries/offline/select_db2.sql
new file mode 100644
index 0000000..a0d2da5
--- /dev/null
+++ b/hplsql/src/test/queries/offline/select_db2.sql
@@ -0,0 +1,5 @@
+select coalesce(max(info_id)+1,0) into NextID from sproc_info with rr use and keep exclusive locks;
+
+select cd, cd + inc days, cd - inc days + coalesce(inc, 0) days
+from (select date '2015-09-02' as cd, 3 as inc from sysibm.sysdummy1);
+

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/cmp_row_count.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/cmp_row_count.out.txt b/hplsql/src/test/results/db/cmp_row_count.out.txt
new file mode 100644
index 0000000..16fadfd
--- /dev/null
+++ b/hplsql/src/test/results/db/cmp_row_count.out.txt
@@ -0,0 +1,12 @@
+Ln:1 CMP
+Ln:1 Query 1: SELECT COUNT(1) AS row_count FROM src
+Ln:1 Query 2: SELECT COUNT(1) AS row_count FROM src
+row_count	500	500
+Ln:2 CMP
+Ln:2 Query 1: SELECT COUNT(1) AS row_count FROM src where 1 = 1
+Ln:2 Query 2: SELECT COUNT(1) AS row_count FROM src
+row_count	500	500
+Ln:3 CMP
+Ln:3 Query 1: SELECT COUNT(1) AS row_count FROM (select 'A' from src) t
+Ln:3 Query 2: SELECT COUNT(1) AS row_count FROM src where 2 = 2
+row_count	500	500
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/cmp_sum.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/cmp_sum.out.txt b/hplsql/src/test/results/db/cmp_sum.out.txt
new file mode 100644
index 0000000..fad64b7
--- /dev/null
+++ b/hplsql/src/test/results/db/cmp_sum.out.txt
@@ -0,0 +1,320 @@
+Ln:1 CMP
+Ln:1 Query 1: SELECT COUNT(1) AS row_count,
+COUNT(C1) AS C1_COUNT_NOT_NULL,
+SUM(LENGTH(C1)) AS C1_SUM_LENGTH,
+MIN(LENGTH(C1)) AS C1_MIN_LENGTH,
+MAX(LENGTH(C1)) AS C1_MAX_LENGTH,
+COUNT(C2) AS C2_COUNT_NOT_NULL,
+SUM(LENGTH(C2)) AS C2_SUM_LENGTH,
+MIN(LENGTH(C2)) AS C2_MIN_LENGTH,
+MAX(LENGTH(C2)) AS C2_MAX_LENGTH,
+COUNT(C3) AS C3_COUNT_NOT_NULL,
+SUM(LENGTH(C3)) AS C3_SUM_LENGTH,
+MIN(LENGTH(C3)) AS C3_MIN_LENGTH,
+MAX(LENGTH(C3)) AS C3_MAX_LENGTH,
+COUNT(C4) AS C4_COUNT_NOT_NULL,
+SUM(C4) AS C4_SUM,
+MIN(C4) AS C4_MIN,
+MAX(C4) AS C4_MAX,
+COUNT(C5) AS C5_COUNT_NOT_NULL,
+SUM(C5) AS C5_SUM,
+MIN(C5) AS C5_MIN,
+MAX(C5) AS C5_MAX,
+COUNT(C6) AS C6_COUNT_NOT_NULL,
+SUM(C6) AS C6_SUM,
+MIN(C6) AS C6_MIN,
+MAX(C6) AS C6_MAX,
+COUNT(C7) AS C7_COUNT_NOT_NULL,
+SUM(C7) AS C7_SUM,
+MIN(C7) AS C7_MIN,
+MAX(C7) AS C7_MAX,
+COUNT(C8) AS C8_COUNT_NOT_NULL,
+SUM(C8) AS C8_SUM,
+MIN(C8) AS C8_MIN,
+MAX(C8) AS C8_MAX,
+COUNT(C9) AS C9_COUNT_NOT_NULL,
+SUM(C9) AS C9_SUM,
+MIN(C9) AS C9_MIN,
+MAX(C9) AS C9_MAX,
+COUNT(C10) AS C10_COUNT_NOT_NULL,
+SUM(C10) AS C10_SUM,
+MIN(C10) AS C10_MIN,
+MAX(C10) AS C10_MAX,
+COUNT(C11) AS C11_COUNT_NOT_NULL,
+SUM(YEAR(C11)) AS C11_SUM_YEAR,
+SUM(MONTH(C11)) AS C11_SUM_MONTH,
+SUM(DAY(C11)) AS C11_SUM_DAY,
+MIN(C11) AS C11_MIN,
+MAX(C11) AS C11_MAX,
+COUNT(C12) AS C12_COUNT_NOT_NULL,
+SUM(YEAR(C12)) AS C12_SUM_YEAR,
+SUM(MONTH(C12)) AS C12_SUM_MONTH,
+SUM(DAY(C12)) AS C12_SUM_DAY,
+MIN(C12) AS C12_MIN,
+MAX(C12) AS C12_MAX FROM src_dt
+Ln:1 Query 2: SELECT COUNT(1) AS row_count,
+COUNT(C1) AS C1_COUNT_NOT_NULL,
+SUM(LENGTH(C1)) AS C1_SUM_LENGTH,
+MIN(LENGTH(C1)) AS C1_MIN_LENGTH,
+MAX(LENGTH(C1)) AS C1_MAX_LENGTH,
+COUNT(C2) AS C2_COUNT_NOT_NULL,
+SUM(LENGTH(C2)) AS C2_SUM_LENGTH,
+MIN(LENGTH(C2)) AS C2_MIN_LENGTH,
+MAX(LENGTH(C2)) AS C2_MAX_LENGTH,
+COUNT(C3) AS C3_COUNT_NOT_NULL,
+SUM(LENGTH(C3)) AS C3_SUM_LENGTH,
+MIN(LENGTH(C3)) AS C3_MIN_LENGTH,
+MAX(LENGTH(C3)) AS C3_MAX_LENGTH,
+COUNT(C4) AS C4_COUNT_NOT_NULL,
+SUM(C4) AS C4_SUM,
+MIN(C4) AS C4_MIN,
+MAX(C4) AS C4_MAX,
+COUNT(C5) AS C5_COUNT_NOT_NULL,
+SUM(C5) AS C5_SUM,
+MIN(C5) AS C5_MIN,
+MAX(C5) AS C5_MAX,
+COUNT(C6) AS C6_COUNT_NOT_NULL,
+SUM(C6) AS C6_SUM,
+MIN(C6) AS C6_MIN,
+MAX(C6) AS C6_MAX,
+COUNT(C7) AS C7_COUNT_NOT_NULL,
+SUM(C7) AS C7_SUM,
+MIN(C7) AS C7_MIN,
+MAX(C7) AS C7_MAX,
+COUNT(C8) AS C8_COUNT_NOT_NULL,
+SUM(C8) AS C8_SUM,
+MIN(C8) AS C8_MIN,
+MAX(C8) AS C8_MAX,
+COUNT(C9) AS C9_COUNT_NOT_NULL,
+SUM(C9) AS C9_SUM,
+MIN(C9) AS C9_MIN,
+MAX(C9) AS C9_MAX,
+COUNT(C10) AS C10_COUNT_NOT_NULL,
+SUM(C10) AS C10_SUM,
+MIN(C10) AS C10_MIN,
+MAX(C10) AS C10_MAX,
+COUNT(C11) AS C11_COUNT_NOT_NULL,
+SUM(YEAR(C11)) AS C11_SUM_YEAR,
+SUM(MONTH(C11)) AS C11_SUM_MONTH,
+SUM(DAY(C11)) AS C11_SUM_DAY,
+MIN(C11) AS C11_MIN,
+MAX(C11) AS C11_MAX,
+COUNT(C12) AS C12_COUNT_NOT_NULL,
+SUM(YEAR(C12)) AS C12_SUM_YEAR,
+SUM(MONTH(C12)) AS C12_SUM_MONTH,
+SUM(DAY(C12)) AS C12_SUM_DAY,
+MIN(C12) AS C12_MIN,
+MAX(C12) AS C12_MAX FROM src_dt
+row_count	500	500
+c1_count_not_null	500	500
+c1_sum_length	3406	3406
+c1_min_length	5	5
+c1_max_length	7	7
+c2_count_not_null	500	500
+c2_sum_length	3406	3406
+c2_min_length	5	5
+c2_max_length	7	7
+c3_count_not_null	500	500
+c3_sum_length	3406	3406
+c3_min_length	5	5
+c3_max_length	7	7
+c4_count_not_null	106	106
+c4_sum	6697	6697
+c4_min	0	0
+c4_max	126	126
+c5_count_not_null	500	500
+c5_sum	130091	130091
+c5_min	0	0
+c5_max	498	498
+c6_count_not_null	500	500
+c6_sum	130091	130091
+c6_min	0	0
+c6_max	498	498
+c7_count_not_null	500	500
+c7_sum	130091	130091
+c7_min	0	0
+c7_max	498	498
+c8_count_not_null	500	500
+c8_sum	13009.1	13009.1
+c8_min	0	0
+c8_max	49.8	49.8
+c9_count_not_null	500	500
+c9_sum	13009.10001783073	13009.10001783073
+c9_min	0.0	0.0
+c9_max	49.79999923706055	49.79999923706055
+c10_count_not_null	500	500
+c10_sum	13009.09999999999	13009.09999999999
+c10_min	0.0	0.0
+c10_max	49.8	49.8
+c11_count_not_null	500	500
+c11_sum_year	1007500	1007500
+c11_sum_month	4500	4500
+c11_sum_day	3500	3500
+c11_min	null	null
+c11_max	null	null
+c12_count_not_null	500	500
+c12_sum_year	1007500	1007500
+c12_sum_month	4500	4500
+c12_sum_day	3500	3500
+c12_min	null	null
+c12_max	null	null
+Ln:2 CMP
+Ln:2 Query 1: SELECT COUNT(1) AS row_count,
+COUNT(C1) AS C1_COUNT_NOT_NULL,
+SUM(LENGTH(C1)) AS C1_SUM_LENGTH,
+MIN(LENGTH(C1)) AS C1_MIN_LENGTH,
+MAX(LENGTH(C1)) AS C1_MAX_LENGTH,
+COUNT(C2) AS C2_COUNT_NOT_NULL,
+SUM(LENGTH(C2)) AS C2_SUM_LENGTH,
+MIN(LENGTH(C2)) AS C2_MIN_LENGTH,
+MAX(LENGTH(C2)) AS C2_MAX_LENGTH,
+COUNT(C3) AS C3_COUNT_NOT_NULL,
+SUM(LENGTH(C3)) AS C3_SUM_LENGTH,
+MIN(LENGTH(C3)) AS C3_MIN_LENGTH,
+MAX(LENGTH(C3)) AS C3_MAX_LENGTH,
+COUNT(C4) AS C4_COUNT_NOT_NULL,
+SUM(C4) AS C4_SUM,
+MIN(C4) AS C4_MIN,
+MAX(C4) AS C4_MAX,
+COUNT(C5) AS C5_COUNT_NOT_NULL,
+SUM(C5) AS C5_SUM,
+MIN(C5) AS C5_MIN,
+MAX(C5) AS C5_MAX,
+COUNT(C6) AS C6_COUNT_NOT_NULL,
+SUM(C6) AS C6_SUM,
+MIN(C6) AS C6_MIN,
+MAX(C6) AS C6_MAX,
+COUNT(C7) AS C7_COUNT_NOT_NULL,
+SUM(C7) AS C7_SUM,
+MIN(C7) AS C7_MIN,
+MAX(C7) AS C7_MAX,
+COUNT(C8) AS C8_COUNT_NOT_NULL,
+SUM(C8) AS C8_SUM,
+MIN(C8) AS C8_MIN,
+MAX(C8) AS C8_MAX,
+COUNT(C9) AS C9_COUNT_NOT_NULL,
+SUM(C9) AS C9_SUM,
+MIN(C9) AS C9_MIN,
+MAX(C9) AS C9_MAX,
+COUNT(C10) AS C10_COUNT_NOT_NULL,
+SUM(C10) AS C10_SUM,
+MIN(C10) AS C10_MIN,
+MAX(C10) AS C10_MAX,
+COUNT(C11) AS C11_COUNT_NOT_NULL,
+SUM(YEAR(C11)) AS C11_SUM_YEAR,
+SUM(MONTH(C11)) AS C11_SUM_MONTH,
+SUM(DAY(C11)) AS C11_SUM_DAY,
+MIN(C11) AS C11_MIN,
+MAX(C11) AS C11_MAX,
+COUNT(C12) AS C12_COUNT_NOT_NULL,
+SUM(YEAR(C12)) AS C12_SUM_YEAR,
+SUM(MONTH(C12)) AS C12_SUM_MONTH,
+SUM(DAY(C12)) AS C12_SUM_DAY,
+MIN(C12) AS C12_MIN,
+MAX(C12) AS C12_MAX FROM src_dt where 1 = 1
+Ln:2 Query 2: SELECT COUNT(1) AS row_count,
+COUNT(C1) AS C1_COUNT_NOT_NULL,
+SUM(LENGTH(C1)) AS C1_SUM_LENGTH,
+MIN(LENGTH(C1)) AS C1_MIN_LENGTH,
+MAX(LENGTH(C1)) AS C1_MAX_LENGTH,
+COUNT(C2) AS C2_COUNT_NOT_NULL,
+SUM(LENGTH(C2)) AS C2_SUM_LENGTH,
+MIN(LENGTH(C2)) AS C2_MIN_LENGTH,
+MAX(LENGTH(C2)) AS C2_MAX_LENGTH,
+COUNT(C3) AS C3_COUNT_NOT_NULL,
+SUM(LENGTH(C3)) AS C3_SUM_LENGTH,
+MIN(LENGTH(C3)) AS C3_MIN_LENGTH,
+MAX(LENGTH(C3)) AS C3_MAX_LENGTH,
+COUNT(C4) AS C4_COUNT_NOT_NULL,
+SUM(C4) AS C4_SUM,
+MIN(C4) AS C4_MIN,
+MAX(C4) AS C4_MAX,
+COUNT(C5) AS C5_COUNT_NOT_NULL,
+SUM(C5) AS C5_SUM,
+MIN(C5) AS C5_MIN,
+MAX(C5) AS C5_MAX,
+COUNT(C6) AS C6_COUNT_NOT_NULL,
+SUM(C6) AS C6_SUM,
+MIN(C6) AS C6_MIN,
+MAX(C6) AS C6_MAX,
+COUNT(C7) AS C7_COUNT_NOT_NULL,
+SUM(C7) AS C7_SUM,
+MIN(C7) AS C7_MIN,
+MAX(C7) AS C7_MAX,
+COUNT(C8) AS C8_COUNT_NOT_NULL,
+SUM(C8) AS C8_SUM,
+MIN(C8) AS C8_MIN,
+MAX(C8) AS C8_MAX,
+COUNT(C9) AS C9_COUNT_NOT_NULL,
+SUM(C9) AS C9_SUM,
+MIN(C9) AS C9_MIN,
+MAX(C9) AS C9_MAX,
+COUNT(C10) AS C10_COUNT_NOT_NULL,
+SUM(C10) AS C10_SUM,
+MIN(C10) AS C10_MIN,
+MAX(C10) AS C10_MAX,
+COUNT(C11) AS C11_COUNT_NOT_NULL,
+SUM(YEAR(C11)) AS C11_SUM_YEAR,
+SUM(MONTH(C11)) AS C11_SUM_MONTH,
+SUM(DAY(C11)) AS C11_SUM_DAY,
+MIN(C11) AS C11_MIN,
+MAX(C11) AS C11_MAX,
+COUNT(C12) AS C12_COUNT_NOT_NULL,
+SUM(YEAR(C12)) AS C12_SUM_YEAR,
+SUM(MONTH(C12)) AS C12_SUM_MONTH,
+SUM(DAY(C12)) AS C12_SUM_DAY,
+MIN(C12) AS C12_MIN,
+MAX(C12) AS C12_MAX FROM src_dt
+row_count	500	500
+c1_count_not_null	500	500
+c1_sum_length	3406	3406
+c1_min_length	5	5
+c1_max_length	7	7
+c2_count_not_null	500	500
+c2_sum_length	3406	3406
+c2_min_length	5	5
+c2_max_length	7	7
+c3_count_not_null	500	500
+c3_sum_length	3406	3406
+c3_min_length	5	5
+c3_max_length	7	7
+c4_count_not_null	106	106
+c4_sum	6697	6697
+c4_min	0	0
+c4_max	126	126
+c5_count_not_null	500	500
+c5_sum	130091	130091
+c5_min	0	0
+c5_max	498	498
+c6_count_not_null	500	500
+c6_sum	130091	130091
+c6_min	0	0
+c6_max	498	498
+c7_count_not_null	500	500
+c7_sum	130091	130091
+c7_min	0	0
+c7_max	498	498
+c8_count_not_null	500	500
+c8_sum	13009.1	13009.1
+c8_min	0	0
+c8_max	49.8	49.8
+c9_count_not_null	500	500
+c9_sum	13009.10001783073	13009.10001783073
+c9_min	0.0	0.0
+c9_max	49.79999923706055	49.79999923706055
+c10_count_not_null	500	500
+c10_sum	13009.09999999999	13009.09999999999
+c10_min	0.0	0.0
+c10_max	49.8	49.8
+c11_count_not_null	500	500
+c11_sum_year	1007500	1007500
+c11_sum_month	4500	4500
+c11_sum_day	3500	3500
+c11_min	null	null
+c11_max	null	null
+c12_count_not_null	500	500
+c12_sum_year	1007500	1007500
+c12_sum_month	4500	4500
+c12_sum_day	3500	3500
+c12_min	null	null
+c12_max	null	null
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/copy_to_file.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/copy_to_file.out.txt b/hplsql/src/test/results/db/copy_to_file.out.txt
new file mode 100644
index 0000000..e571d36
--- /dev/null
+++ b/hplsql/src/test/results/db/copy_to_file.out.txt
@@ -0,0 +1,6 @@
+Ln:1 COPY
+Ln:1 Query executed: 2 columns, output file: target/tmp/src.txt
+Ln:2 COPY
+Ln:2 Statement:
+select * from src
+Ln:2 Query executed: 2 columns, output file: target/tmp/src2.txt
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/copy_to_hdfs.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/copy_to_hdfs.out.txt b/hplsql/src/test/results/db/copy_to_hdfs.out.txt
new file mode 100644
index 0000000..23c0cb2
--- /dev/null
+++ b/hplsql/src/test/results/db/copy_to_hdfs.out.txt
@@ -0,0 +1,4 @@
+Ln:2 COPY
+Ln:2 Statement:
+select * from src
+Ln:2 Query executed: 2 columns, output file: /user/hplsql/src2.txt
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/copy_to_table.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/copy_to_table.out.txt b/hplsql/src/test/results/db/copy_to_table.out.txt
new file mode 100644
index 0000000..411b425
--- /dev/null
+++ b/hplsql/src/test/results/db/copy_to_table.out.txt
@@ -0,0 +1,2 @@
+Ln:1 COPY
+Ln:1 SELECT executed: 2 columns
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/part_count.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/part_count.out.txt b/hplsql/src/test/results/db/part_count.out.txt
new file mode 100644
index 0000000..485ffe1
--- /dev/null
+++ b/hplsql/src/test/results/db/part_count.out.txt
@@ -0,0 +1,15 @@
+Ln:1 IF
+Ln:1 Query: SHOW PARTITIONS partition_date_1
+Ln:1 IF TRUE executed
+Ln:2 PRINT
+success
+Ln:7 IF
+Ln:7 Query: SHOW PARTITIONS partition_date_1 PARTITION (region='1')
+Ln:7 IF TRUE executed
+Ln:8 PRINT
+success
+Ln:13 IF
+Ln:13 Query: SHOW PARTITIONS partition_date_1a
+Ln:13 IF TRUE executed
+Ln:14 PRINT
+success
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/part_count_by.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/part_count_by.out.txt b/hplsql/src/test/results/db/part_count_by.out.txt
new file mode 100644
index 0000000..61f51cd
--- /dev/null
+++ b/hplsql/src/test/results/db/part_count_by.out.txt
@@ -0,0 +1,13 @@
+3
+dt=2000-01-01	2
+dt=2013-12-10	1
+dt=2013-08-08	2
+dt=2013-08-08/region=1	1
+dt=2000-01-01/region=1	1
+dt=2013-12-10/region=2020-20-20	1
+dt=2000-01-01/region=2	1
+dt=2013-08-08/region=10	1
+region=10	1
+region=2020-20-20	1
+region=2	1
+region=1	2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/select_into.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/select_into.out.txt b/hplsql/src/test/results/db/select_into.out.txt
index 80d067e..3f4ae31 100644
--- a/hplsql/src/test/results/db/select_into.out.txt
+++ b/hplsql/src/test/results/db/select_into.out.txt
@@ -1,19 +1,43 @@
-Ln:1 DECLARE v_int INT
-Ln:2 DECLARE v_dec DECIMAL
-Ln:3 DECLARE v_dec0 DECIMAL
-Ln:5 SELECT
-Ln:5 SELECT CAST(1 AS INT), CAST(1.1 AS DECIMAL(18,2)), CAST(1.1 AS DECIMAL(18,0)) FROM src LIMIT 1
-Ln:5 SELECT completed successfully
-Ln:5 SELECT INTO statement executed
-Ln:5 COLUMN: _c0, int
-Ln:5 SET v_int = 1
-Ln:5 COLUMN: _c1, decimal
-Ln:5 SET v_dec = 1.1
-Ln:5 COLUMN: _c2, decimal
-Ln:5 SET v_dec0 = 1
-Ln:15 PRINT
+Ln:1 DECLARE v_bint BIGINT
+Ln:2 DECLARE v_int INT
+Ln:3 DECLARE v_sint SMALLINT
+Ln:4 DECLARE v_tint TINYINT
+Ln:5 DECLARE v_dec DECIMAL
+Ln:6 DECLARE v_dec0 DECIMAL
+Ln:7 DECLARE v_str STRING
+Ln:9 SELECT
+Ln:9 SELECT CAST(1 AS BIGINT), CAST(1 AS INT), CAST(1 AS SMALLINT), CAST(1 AS TINYINT), CAST(1.1 AS DECIMAL(18,2)), CAST(1.1 AS DECIMAL(18,0)) FROM src LIMIT 1
+Ln:9 SELECT completed successfully
+Ln:9 SELECT INTO statement executed
+Ln:9 COLUMN: _c0, bigint
+Ln:9 SET v_bint = 1
+Ln:9 COLUMN: _c1, int
+Ln:9 SET v_int = 1
+Ln:9 COLUMN: _c2, smallint
+Ln:9 SET v_sint = 1
+Ln:9 COLUMN: _c3, tinyint
+Ln:9 SET v_tint = 1
+Ln:9 COLUMN: _c4, decimal
+Ln:9 SET v_dec = 1.1
+Ln:9 COLUMN: _c5, decimal
+Ln:9 SET v_dec0 = 1
+Ln:25 PRINT
+BIGINT: 1
+Ln:26 PRINT
 INT: 1
-Ln:16 PRINT
+Ln:27 PRINT
+SMALLINT: 1
+Ln:28 PRINT
+TINYINT: 1
+Ln:29 PRINT
 DECIMAL: 1.1
-Ln:17 PRINT
-DECIMAL0: 1
\ No newline at end of file
+Ln:30 PRINT
+DECIMAL0: 1
+Ln:32 SELECT
+Ln:32 select 'a' from src LIMIT 1
+Ln:32 SELECT completed successfully
+Ln:32 SELECT INTO statement executed
+Ln:32 COLUMN: _c0, string
+Ln:32 SET v_str = a
+Ln:33 PRINT
+string: a
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/db/select_into2.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/db/select_into2.out.txt b/hplsql/src/test/results/db/select_into2.out.txt
new file mode 100644
index 0000000..03e67ad
--- /dev/null
+++ b/hplsql/src/test/results/db/select_into2.out.txt
@@ -0,0 +1,19 @@
+Ln:1 DECLARE v_float float
+Ln:2 DECLARE v_double double
+Ln:3 DECLARE v_double2 double precision
+Ln:5 SELECT
+Ln:5 select cast(1.1 as float), cast(1.1 as double), cast(1.1 as double) from src LIMIT 1
+Ln:5 SELECT completed successfully
+Ln:5 SELECT INTO statement executed
+Ln:5 COLUMN: _c0, float
+Ln:5 SET v_float = 1.100000023841858
+Ln:5 COLUMN: _c1, double
+Ln:5 SET v_double = 1.1
+Ln:5 COLUMN: _c2, double
+Ln:5 SET v_double2 = 1.1
+Ln:15 PRINT
+float: 1.100000023841858
+Ln:16 PRINT
+double: 1.1
+Ln:17 PRINT
+double precision: 1.1
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/local/create_procedure2.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/local/create_procedure2.out.txt b/hplsql/src/test/results/local/create_procedure2.out.txt
new file mode 100644
index 0000000..765faa9
--- /dev/null
+++ b/hplsql/src/test/results/local/create_procedure2.out.txt
@@ -0,0 +1,10 @@
+Ln:1 CREATE PROCEDURE set_message
+Ln:13 DECLARE str STRING
+Ln:14 EXEC PROCEDURE set_message
+Ln:14 SET PARAM name = world
+Ln:14 SET PARAM result = null
+Ln:3 DECLARE str STRING = 'Hello, world!'
+Ln:5 DECLARE HANDLER
+Ln:9 SET result = 'Hello, world!'
+Ln:15 PRINT
+Hello, world!
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/local/if2.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/local/if2.out.txt b/hplsql/src/test/results/local/if2.out.txt
new file mode 100644
index 0000000..63a6213
--- /dev/null
+++ b/hplsql/src/test/results/local/if2.out.txt
@@ -0,0 +1,4 @@
+Ln:1 IF
+Ln:1 IF TRUE executed
+Ln:2 PRINT
+correct
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/local/include.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/local/include.out.txt b/hplsql/src/test/results/local/include.out.txt
new file mode 100644
index 0000000..86cfa05
--- /dev/null
+++ b/hplsql/src/test/results/local/include.out.txt
@@ -0,0 +1,8 @@
+Ln:1 INCLUDE src/test/queries/local/include_file.sql
+INCLUDE CONTENT src/test/queries/local/include_file.sql (non-empty)
+Ln:1 PRINT
+file included successfully
+Ln:2 INCLUDE src/test/queries/local/include_file.sql
+INCLUDE CONTENT src/test/queries/local/include_file.sql (non-empty)
+Ln:1 PRINT
+file included successfully
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/06790789/hplsql/src/test/results/local/mult_div.out.txt
----------------------------------------------------------------------
diff --git a/hplsql/src/test/results/local/mult_div.out.txt b/hplsql/src/test/results/local/mult_div.out.txt
new file mode 100644
index 0000000..cd17c16
--- /dev/null
+++ b/hplsql/src/test/results/local/mult_div.out.txt
@@ -0,0 +1,7 @@
+Ln:1 DECLARE a int = 8
+Ln:2 DECLARE b int = 4
+Ln:3 DECLARE c int = 2
+Ln:5 PRINT
+1
+Ln:7 SET a = 1
+Ln:8 SET b = 1
\ No newline at end of file