Posted to commits@hive.apache.org by br...@apache.org on 2014/12/24 15:51:20 UTC

svn commit: r1647800 - in /hive/branches/HIVE-8065: itests/src/test/resources/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/encrypted/

Author: brock
Date: Wed Dec 24 14:51:19 2014
New Revision: 1647800

URL: http://svn.apache.org/r1647800
Log:
HIVE-8822 - Create a unit test where we insert into a statically partitioned table (Dong Chen via Brock)
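
For readers skimming the patch: a "static" partition insert is one where the partition value is spelled out as a constant in the PARTITION clause, as opposed to a dynamic insert where Hive derives the value from the select list. A minimal HiveQL sketch of the distinction (the table names here are illustrative, not part of this commit):

    -- Static partitioning: the partition value is a constant in the statement.
    insert into table target partition (ds='today')
    select key, value from source;

    -- Dynamic partitioning, for contrast: ds comes from the last select
    -- expression (requires hive.exec.dynamic.partition=true and
    -- hive.exec.dynamic.partition.mode=nonstrict).
    insert into table target partition (ds)
    select key, value, ds from source;

The new test below exercises only the static form, against both an encrypted and an unencrypted transactional table.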

Added:
    hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_insert_partition_static.q
    hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out
Modified:
    hive/branches/HIVE-8065/itests/src/test/resources/testconfiguration.properties

Modified: hive/branches/HIVE-8065/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/itests/src/test/resources/testconfiguration.properties?rev=1647800&r1=1647799&r2=1647800&view=diff
==============================================================================
--- hive/branches/HIVE-8065/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/HIVE-8065/itests/src/test/resources/testconfiguration.properties Wed Dec 24 14:51:19 2014
@@ -271,7 +271,8 @@ minitez.query.files=bucket_map_join_tez1
   tez_smb_1.q,\
   vectorized_dynamic_partition_pruning.q
 
-encrypted.query.files.shared=join_unencrypted_encrypted_tbls.q
+encrypted.query.files.shared=join_unencrypted_encrypted_tbls.q,\
+  encryption_insert_partition_static.q
 
 beeline.positive.exclude=add_part_exist.q,\
   alter1.q,\

Added: hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_insert_partition_static.q
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_insert_partition_static.q?rev=1647800&view=auto
==============================================================================
--- hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_insert_partition_static.q (added)
+++ hive/branches/HIVE-8065/ql/src/test/queries/clientpositive/encryption_insert_partition_static.q Wed Dec 24 14:51:19 2014
@@ -0,0 +1,58 @@
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.enforce.bucketing=true;
+
+-- SORT_QUERY_RESULTS
+
+-- init
+drop table IF EXISTS encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey;
+drop table IF EXISTS unencryptedTable;
+
+create table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey(key string,
+    value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+
+create table unencryptedTable(key string,
+    value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+
+-- insert encrypted table from values
+explain extended insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='today') values
+    ('501', 'val_501'),
+    ('502', 'val_502');
+
+insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='today') values
+    ('501', 'val_501'),
+    ('502', 'val_502');
+
+select * from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey order by key;
+
+-- insert encrypted table from unencrypted source
+explain extended from src
+insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='yesterday')
+    select * limit 2;
+
+from src
+insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='yesterday')
+    select * limit 2;
+
+select * from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey order by key;
+
+-- insert unencrypted table from encrypted source
+explain extended from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+insert into table unencryptedTable partition
+    (ds='today')
+    select key, value;
+
+from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+insert into table unencryptedTable partition
+    (ds='today')
+    select key, value;
+
+select * from unencryptedTable order by key;
+
+-- clean up
+drop table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey;
+drop table unencryptedTable;
\ No newline at end of file
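
One setup detail worth flagging in the .q file above: it drops and creates tables inside encryptedWith128BitsKeyDB but never creates that database. The expectation (an assumption read from the file, not stated in the patch) is that the encrypted-tests harness provisions the database together with its 128-bit HDFS encryption zone and key before the script runs, since encryption zones are created at the HDFS level (via the hdfs crypto CLI) and have no HiveQL equivalent. A hypothetical sketch of the SQL-visible part of that setup:

    -- Hypothetical harness-side setup; the encryption zone and key are
    -- created outside HiveQL, so only the database itself appears here.
    create database if not exists encryptedWith128BitsKeyDB;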

Added: hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out?rev=1647800&view=auto
==============================================================================
--- hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out (added)
+++ hive/branches/HIVE-8065/ql/src/test/results/clientpositive/encrypted/encryption_insert_partition_static.q.out Wed Dec 24 14:51:19 2014
@@ -0,0 +1,785 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+-- init
+drop table IF EXISTS encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+-- init
+drop table IF EXISTS encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table IF EXISTS unencryptedTable
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table IF EXISTS unencryptedTable
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey(key string,
+    value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:encryptedwith128bitskeydb
+PREHOOK: Output: encryptedWith128BitsKeyDB@encryptedTableIn128BitsKey
+POSTHOOK: query: create table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey(key string,
+    value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:encryptedwith128bitskeydb
+POSTHOOK: Output: encryptedWith128BitsKeyDB@encryptedTableIn128BitsKey
+PREHOOK: query: create table unencryptedTable(key string,
+    value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@unencryptedTable
+POSTHOOK: query: create table unencryptedTable(key string,
+    value string) partitioned by (ds string) clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@unencryptedTable
+PREHOOK: query: -- insert encrypted table from values
+explain extended insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='today') values
+    ('501', 'val_501'),
+    ('502', 'val_502')
+PREHOOK: type: QUERY
+POSTHOOK: query: -- insert encrypted table from values
+explain extended insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='today') values
+    ('501', 'val_501'),
+    ('502', 'val_502')
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      null
+         null
+            Values__Tmp__Table__1
+   TOK_INSERT
+      TOK_INSERT_INTO
+         TOK_TAB
+            TOK_TABNAME
+               encryptedWith128BitsKeyDB
+               encryptedTableIn128BitsKey
+            TOK_PARTSPEC
+               TOK_PARTVAL
+                  ds
+                  'today'
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_ALLCOLREF
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: values__tmp__table__1
+            Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: tmp_values_col1 (type: string), tmp_values_col2 (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Map-reduce partition columns: _col0 (type: string)
+                Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+                tag: -1
+                value expressions: _col0 (type: string), _col1 (type: string)
+                auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: Values__Tmp__Table__1
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns tmp_values_col1,tmp_values_col2
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.values__tmp__table__1
+              serialization.ddl struct values__tmp__table__1 { string tmp_values_col1, string tmp_values_col2}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns tmp_values_col1,tmp_values_col2
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.values__tmp__table__1
+                serialization.ddl struct values__tmp__table__1 { string tmp_values_col1, string tmp_values_col2}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.values__tmp__table__1
+            name: default.values__tmp__table__1
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Extract
+          Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            GlobalTableId: 1
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+            Static Partition Specification: ds=today/
+            Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+                properties:
+                  bucket_count 2
+                  bucket_field_name key
+                  columns key,value
+                  columns.comments 
+                  columns.types string:string
+#### A masked pattern was here ####
+                  name encryptedwith128bitskeydb.encryptedtablein128bitskey
+                  partition_columns ds
+                  partition_columns.types string
+                  serialization.ddl struct encryptedtablein128bitskey { string key, string value}
+                  serialization.format 1
+                  serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                  transactional true
+#### A masked pattern was here ####
+                serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                name: encryptedwith128bitskeydb.encryptedtablein128bitskey
+            TotalFiles: 1
+            GatherStats: true
+            MultiFileSpray: false
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds today
+          replace: false
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name encryptedwith128bitskeydb.encryptedtablein128bitskey
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct encryptedtablein128bitskey { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                transactional true
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: encryptedwith128bitskeydb.encryptedtablein128bitskey
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='today') values
+    ('501', 'val_501'),
+    ('502', 'val_502')
+PREHOOK: type: QUERY
+PREHOOK: Input: default@values__tmp__table__2
+PREHOOK: Output: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=today
+POSTHOOK: query: insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='today') values
+    ('501', 'val_501'),
+    ('502', 'val_502')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@values__tmp__table__2
+POSTHOOK: Output: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=today
+POSTHOOK: Lineage: encryptedtablein128bitskey PARTITION(ds=today).key SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: encryptedtablein128bitskey PARTITION(ds=today).value SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: select * from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=today
+#### A masked pattern was here ####
+POSTHOOK: query: select * from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=today
+#### A masked pattern was here ####
+501	val_501	today
+502	val_502	today
+PREHOOK: query: -- insert encrypted table from unencrypted source
+explain extended from src
+insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='yesterday')
+    select * limit 2
+PREHOOK: type: QUERY
+POSTHOOK: query: -- insert encrypted table from unencrypted source
+explain extended from src
+insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='yesterday')
+    select * limit 2
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            src
+   TOK_INSERT
+      TOK_INSERT_INTO
+         TOK_TAB
+            TOK_TABNAME
+               encryptedWith128BitsKeyDB
+               encryptedTableIn128BitsKey
+            TOK_PARTSPEC
+               TOK_PARTVAL
+                  ds
+                  'yesterday'
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_ALLCOLREF
+      TOK_LIMIT
+         2
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+  Stage-3 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+              Limit
+                Number of rows: 2
+                Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+                  tag: -1
+                  value expressions: _col0 (type: string), _col1 (type: string)
+                  auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments defaultdefault
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE true
+                bucket_count -1
+                columns key,value
+                columns.comments defaultdefault
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+          Limit
+            Number of rows: 2
+            Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  properties:
+                    columns _col0,_col1
+                    columns.types string,string
+                    escape.delim \
+                    serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            GatherStats: false
+            Reduce Output Operator
+              sort order: 
+              Map-reduce partition columns: _col0 (type: string)
+              Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+              tag: -1
+              value expressions: _col0 (type: string), _col1 (type: string)
+              auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: -mr-10001
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0,_col1
+              columns.types string,string
+              escape.delim \
+              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0,_col1
+                columns.types string,string
+                escape.delim \
+                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Extract
+          Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            GlobalTableId: 1
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+            Static Partition Specification: ds=yesterday/
+            Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+                properties:
+                  bucket_count 2
+                  bucket_field_name key
+                  columns key,value
+                  columns.comments 
+                  columns.types string:string
+#### A masked pattern was here ####
+                  name encryptedwith128bitskeydb.encryptedtablein128bitskey
+                  partition_columns ds
+                  partition_columns.types string
+                  serialization.ddl struct encryptedtablein128bitskey { string key, string value}
+                  serialization.format 1
+                  serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                  transactional true
+#### A masked pattern was here ####
+                serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                name: encryptedwith128bitskeydb.encryptedtablein128bitskey
+            TotalFiles: 1
+            GatherStats: true
+            MultiFileSpray: false
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds yesterday
+          replace: false
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name encryptedwith128bitskeydb.encryptedtablein128bitskey
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct encryptedtablein128bitskey { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                transactional true
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: encryptedwith128bitskeydb.encryptedtablein128bitskey
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: from src
+insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='yesterday')
+    select * limit 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=yesterday
+POSTHOOK: query: from src
+insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey partition
+    (ds='yesterday')
+    select * limit 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=yesterday
+POSTHOOK: Lineage: encryptedtablein128bitskey PARTITION(ds=yesterday).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: encryptedtablein128bitskey PARTITION(ds=yesterday).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=today
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=yesterday
+#### A masked pattern was here ####
+POSTHOOK: query: select * from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=today
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=yesterday
+#### A masked pattern was here ####
+238	val_238	yesterday
+501	val_501	today
+502	val_502	today
+86	val_86	yesterday
+PREHOOK: query: -- insert unencrypted table from encrypted source
+explain extended from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+insert into table unencryptedTable partition
+    (ds='today')
+    select key, value
+PREHOOK: type: QUERY
+POSTHOOK: query: -- insert unencrypted table from encrypted source
+explain extended from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+insert into table unencryptedTable partition
+    (ds='today')
+    select key, value
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            encryptedWith128BitsKeyDB
+            encryptedTableIn128BitsKey
+   TOK_INSERT
+      TOK_INSERT_INTO
+         TOK_TAB
+            TOK_TABNAME
+               unencryptedTable
+            TOK_PARTSPEC
+               TOK_PARTVAL
+                  ds
+                  'today'
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               key
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               value
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: encryptedtablein128bitskey
+            Statistics: Num rows: 12 Data size: 2567 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 12 Data size: 2567 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order: 
+                Map-reduce partition columns: _col0 (type: string)
+                Statistics: Num rows: 12 Data size: 2567 Basic stats: COMPLETE Column stats: NONE
+                tag: -1
+                value expressions: _col0 (type: string), _col1 (type: string)
+                auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=today
+            input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+            output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+            partition values:
+              ds today
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count 2
+              bucket_field_name key
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name encryptedwith128bitskeydb.encryptedtablein128bitskey
+              numFiles 2
+              numRows 0
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 0
+              serialization.ddl struct encryptedtablein128bitskey { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              totalSize 1283
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+          
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name encryptedwith128bitskeydb.encryptedtablein128bitskey
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct encryptedtablein128bitskey { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                transactional true
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: encryptedwith128bitskeydb.encryptedtablein128bitskey
+            name: encryptedwith128bitskeydb.encryptedtablein128bitskey
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=yesterday
+            input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+            output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+            partition values:
+              ds yesterday
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count 2
+              bucket_field_name key
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name encryptedwith128bitskeydb.encryptedtablein128bitskey
+              numFiles 2
+              numRows 0
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 0
+              serialization.ddl struct encryptedtablein128bitskey { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              totalSize 1284
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+          
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name encryptedwith128bitskeydb.encryptedtablein128bitskey
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct encryptedtablein128bitskey { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                transactional true
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: encryptedwith128bitskeydb.encryptedtablein128bitskey
+            name: encryptedwith128bitskeydb.encryptedtablein128bitskey
+      Truncated Path -> Alias:
+        /encryptedwith128bitskeydb.db/encryptedtablein128bitskey/ds=today [encryptedtablein128bitskey]
+        /encryptedwith128bitskeydb.db/encryptedtablein128bitskey/ds=yesterday [encryptedtablein128bitskey]
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Extract
+          Statistics: Num rows: 12 Data size: 2567 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            GlobalTableId: 1
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+            Static Partition Specification: ds=today/
+            Statistics: Num rows: 12 Data size: 2567 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+                output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+                properties:
+                  bucket_count 2
+                  bucket_field_name key
+                  columns key,value
+                  columns.comments 
+                  columns.types string:string
+#### A masked pattern was here ####
+                  name default.unencryptedtable
+                  partition_columns ds
+                  partition_columns.types string
+                  serialization.ddl struct unencryptedtable { string key, string value}
+                  serialization.format 1
+                  serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                  transactional true
+#### A masked pattern was here ####
+                serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                name: default.unencryptedtable
+            TotalFiles: 1
+            GatherStats: true
+            MultiFileSpray: false
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds today
+          replace: false
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
+              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+              properties:
+                bucket_count 2
+                bucket_field_name key
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.unencryptedtable
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct unencryptedtable { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+                transactional true
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+              name: default.unencryptedtable
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+insert into table unencryptedTable partition
+    (ds='today')
+    select key, value
+PREHOOK: type: QUERY
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=today
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=yesterday
+PREHOOK: Output: default@unencryptedtable@ds=today
+POSTHOOK: query: from encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+insert into table unencryptedTable partition
+    (ds='today')
+    select key, value
+POSTHOOK: type: QUERY
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=today
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey@ds=yesterday
+POSTHOOK: Output: default@unencryptedtable@ds=today
+POSTHOOK: Lineage: unencryptedtable PARTITION(ds=today).key SIMPLE [(encryptedtablein128bitskey)encryptedtablein128bitskey.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: unencryptedtable PARTITION(ds=today).value SIMPLE [(encryptedtablein128bitskey)encryptedtablein128bitskey.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: select * from unencryptedTable order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@unencryptedtable
+PREHOOK: Input: default@unencryptedtable@ds=today
+#### A masked pattern was here ####
+POSTHOOK: query: select * from unencryptedTable order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@unencryptedtable
+POSTHOOK: Input: default@unencryptedtable@ds=today
+#### A masked pattern was here ####
+238	val_238	today
+501	val_501	today
+502	val_502	today
+86	val_86	today
+PREHOOK: query: -- clean up
+drop table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey
+PREHOOK: Output: encryptedwith128bitskeydb@encryptedtablein128bitskey
+POSTHOOK: query: -- clean up
+drop table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: encryptedwith128bitskeydb@encryptedtablein128bitskey
+POSTHOOK: Output: encryptedwith128bitskeydb@encryptedtablein128bitskey
+PREHOOK: query: drop table unencryptedTable
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@unencryptedtable
+PREHOOK: Output: default@unencryptedtable
+POSTHOOK: query: drop table unencryptedTable
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@unencryptedtable
+POSTHOOK: Output: default@unencryptedtable
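
A final note on reading the golden output: the "select * limit 2" insert from src carries no ORDER BY, so the two rows that land in ds='yesterday' (238 and 86) are simply the first two rows of the standard src fixture; the SORT_QUERY_RESULTS directive at the top of the .q file only sorts query output for comparison and does not make the row selection itself deterministic. A variant that pins the chosen rows explicitly would look like this (a sketch for illustration, not part of the commit):

    -- Deterministic alternative: order before limiting.
    insert into table encryptedWith128BitsKeyDB.encryptedTableIn128BitsKey
        partition (ds='yesterday')
    select key, value from src order by key limit 2;

Using this form would of course change which rows appear in the expected output above.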