Posted to commits@hive.apache.org by ha...@apache.org on 2013/03/19 23:37:34 UTC

svn commit: r1458549 [4/4] - in /hive/branches/ptf-windowing: ./ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/ beeline/src/java/ beeline/src/java/org/ beeline/src/java/org/apache/ beeline/src/java/org/apache/hive/ beeline/src/java/org/apach...

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_15.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_15.q.out?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_15.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_15.q.out Tue Mar 19 22:37:16 2013
@@ -118,23 +118,14 @@ STAGE PLANS:
                   expr: '2'
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -199,23 +190,14 @@ STAGE PLANS:
                   expr: '1'
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1 partition (ds)
@@ -293,10 +275,12 @@ ds=1
 ds=2
 PREHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -310,10 +294,12 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 8	2	1
 PREHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]

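The recurring change in these union_remove_*.q.out diffs is the removal of a redundant Select Operator: when a Select's expressions are a pure identity projection of its parent's output columns (_col0, _col1, ... mapped to themselves with unchanged types), the operator adds nothing, and the File Output Operator is attached directly to the parent. A minimal sketch of the identity check, using hypothetical names rather than Hive's actual optimizer API:

  import java.util.List;

  final class IdentityProjectionCheck {
    // A projection is removable when every expression is a plain reference to
    // the parent's i-th output column, in order, with no cast or computation
    // applied. Where a cast is present (see union_remove_17, where UDFToLong
    // appears), it is folded into the surviving Select instead.
    static boolean isIdentity(List<String> parentCols, List<String> selectExprs) {
      return parentCols.equals(selectExprs);
    }
  }
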
Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_16.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_16.q.out?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_16.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_16.q.out Tue Mar 19 22:37:16 2013
@@ -121,23 +121,14 @@ STAGE PLANS:
                   expr: '2'
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-6
     Conditional Operator
@@ -223,23 +214,14 @@ STAGE PLANS:
                   expr: '1'
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1 partition (ds)
@@ -317,10 +299,12 @@ ds=1
 ds=2
 PREHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -334,10 +318,12 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 8	2	1
 PREHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_17.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_17.q.out?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_17.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_17.q.out Tue Mar 19 22:37:16 2013
@@ -80,28 +80,19 @@ STAGE PLANS:
                   expressions:
                         expr: _col0
                         type: string
-                        expr: _col1
-                        type: int
+                        expr: UDFToLong(_col1)
+                        type: bigint
                         expr: _col2
                         type: string
                   outputColumnNames: _col0, _col1, _col2
-                  Select Operator
-                    expressions:
-                          expr: _col0
-                          type: string
-                          expr: UDFToLong(_col1)
-                          type: bigint
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col0, _col1, _col2
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.outputtbl1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                        name: default.outputtbl1
         null-subquery2:a-subquery2:inputtbl1 
           TableScan
             alias: inputtbl1
@@ -119,28 +110,19 @@ STAGE PLANS:
                   expressions:
                         expr: _col0
                         type: string
-                        expr: _col1
-                        type: int
+                        expr: UDFToLong(_col1)
+                        type: bigint
                         expr: _col2
                         type: string
                   outputColumnNames: _col0, _col1, _col2
-                  Select Operator
-                    expressions:
-                          expr: _col0
-                          type: string
-                          expr: UDFToLong(_col1)
-                          type: bigint
-                          expr: _col2
-                          type: string
-                    outputColumnNames: _col0, _col1, _col2
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.outputtbl1
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 1
+                    table:
+                        input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                        name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -230,10 +212,12 @@ ds=1
 ds=2
 PREHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '1' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=1
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -248,10 +232,12 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 8	1	1
 PREHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '2' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=2
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=1).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_18.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_18.q.out?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_18.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_18.q.out Tue Mar 19 22:37:16 2013
@@ -126,23 +126,14 @@ STAGE PLANS:
                   expr: _col1
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -217,23 +208,14 @@ STAGE PLANS:
                   expr: _col1
                   type: string
             outputColumnNames: _col0, _col1, _col2
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-                    expr: _col2
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1 partition (ds)
@@ -343,10 +325,12 @@ ds=18
 ds=28
 PREHOOK: query: select * from outputTbl1 where ds = '11' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=11
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '11' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=11
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=11).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -365,10 +349,12 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 1	1	11
 PREHOOK: query: select * from outputTbl1 where ds = '18' order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=18
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds = '18' order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=18
 #### A masked pattern was here ####
 POSTHOOK: Lineage: outputtbl1 PARTITION(ds=11).key EXPRESSION [(inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), (inputtbl1)inputtbl1.FieldSchema(name:key, type:string, comment:null), ]
@@ -387,6 +373,7 @@ POSTHOOK: Lineage: outputtbl1 PARTITION(
 8	1	18
 PREHOOK: query: select * from outputTbl1 where ds is not null order by key, values
 PREHOOK: type: QUERY
+PREHOOK: Input: default@outputtbl1
 PREHOOK: Input: default@outputtbl1@ds=11
 PREHOOK: Input: default@outputtbl1@ds=12
 PREHOOK: Input: default@outputtbl1@ds=13
@@ -396,6 +383,7 @@ PREHOOK: Input: default@outputtbl1@ds=28
 #### A masked pattern was here ####
 POSTHOOK: query: select * from outputTbl1 where ds is not null order by key, values
 POSTHOOK: type: QUERY
+POSTHOOK: Input: default@outputtbl1
 POSTHOOK: Input: default@outputtbl1@ds=11
 POSTHOOK: Input: default@outputtbl1@ds=12
 POSTHOOK: Input: default@outputtbl1@ds=13

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_19.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_19.q.out?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_19.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_19.q.out Tue Mar 19 22:37:16 2013
@@ -110,21 +110,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -185,21 +178,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1
@@ -359,21 +345,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -438,21 +417,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1
@@ -576,37 +548,30 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Select Operator
             expressions:
-                  expr: _col0
-                  type: string
+                  expr: (_col0 + _col0)
+                  type: double
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: (_col0 + _col0)
-                    type: double
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              Filter Operator
-                predicate:
-                    expr: (_col0 >= 7.0)
-                    type: boolean
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: double
-                        expr: _col1
-                        type: bigint
-                  outputColumnNames: _col0, _col1
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.outputtbl1
+            Filter Operator
+              predicate:
+                  expr: (_col0 >= 7.0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: _col0
+                      type: double
+                      expr: _col1
+                      type: bigint
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -662,37 +627,30 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Select Operator
             expressions:
-                  expr: _col0
-                  type: string
+                  expr: (_col0 + _col0)
+                  type: double
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: (_col0 + _col0)
-                    type: double
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              Filter Operator
-                predicate:
-                    expr: (_col0 >= 7.0)
-                    type: boolean
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: double
-                        expr: _col1
-                        type: bigint
-                  outputColumnNames: _col0, _col1
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.outputtbl1
+            Filter Operator
+              predicate:
+                  expr: (_col0 >= 7.0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: _col0
+                      type: double
+                      expr: _col1
+                      type: bigint
+                outputColumnNames: _col0, _col1
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_2.q.out?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_2.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_2.q.out Tue Mar 19 22:37:16 2013
@@ -81,28 +81,14 @@ STAGE PLANS:
                     expr: 2
                     type: int
               outputColumnNames: _col0, _col1
-              Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: UDFToLong(_col1)
-                      type: bigint
-                outputColumnNames: _col0, _col1
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: bigint
-                  outputColumnNames: _col0, _col1
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.outputtbl1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -163,21 +149,14 @@ STAGE PLANS:
                   expr: _col1
                   type: bigint
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-                    expr: _col1
-                    type: bigint
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-3
     Map Reduce
@@ -192,28 +171,14 @@ STAGE PLANS:
                     expr: 1
                     type: int
               outputColumnNames: _col0, _col1
-              Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: UDFToLong(_col1)
-                      type: bigint
-                outputColumnNames: _col0, _col1
-                Select Operator
-                  expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: bigint
-                  outputColumnNames: _col0, _col1
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        name: default.outputtbl1
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_20.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_20.q.out?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_20.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_20.q.out Tue Mar 19 22:37:16 2013
@@ -107,26 +107,19 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Select Operator
             expressions:
-                  expr: _col0
-                  type: string
                   expr: _col1
                   type: bigint
+                  expr: _col0
+                  type: string
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col1
-                    type: bigint
-                    expr: _col0
-                    type: string
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -182,26 +175,19 @@ STAGE PLANS:
           outputColumnNames: _col0, _col1
           Select Operator
             expressions:
-                  expr: _col0
-                  type: string
                   expr: _col1
                   type: bigint
+                  expr: _col0
+                  type: string
             outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col1
-                    type: bigint
-                    expr: _col0
-                    type: string
-              outputColumnNames: _col0, _col1
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_21.q.out
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_21.q.out?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_21.q.out (original)
+++ hive/branches/ptf-windowing/ql/src/test/results/clientpositive/union_remove_21.q.out Tue Mar 19 22:37:16 2013
@@ -109,22 +109,15 @@ STAGE PLANS:
             expressions:
                   expr: _col0
                   type: string
-                  expr: _col1
-                  type: bigint
-            outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
   Stage: Stage-0
     Move Operator
@@ -182,22 +175,15 @@ STAGE PLANS:
             expressions:
                   expr: _col0
                   type: string
-                  expr: _col1
-                  type: bigint
-            outputColumnNames: _col0, _col1
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: string
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.outputtbl1
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 1
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  name: default.outputtbl1
 
 
 PREHOOK: query: insert overwrite table outputTbl1

Modified: hive/branches/ptf-windowing/serde/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/build.xml?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/build.xml (original)
+++ hive/branches/ptf-windowing/serde/build.xml Tue Mar 19 22:37:16 2013
@@ -51,27 +51,6 @@
     </javac>
   </target>
 
-  <target name="thriftif" depends="check-thrift-home">
-    <echo message="Project: ${ant.project.name}"/>
-    <fail unless="thrift.home">You must set the 'thrift.home' property!</fail>
-    <echo>Executing ${thrift.home}/bin/thrift to build java serde Constants... </echo>
-    <exec executable="${thrift.home}/bin/thrift"  failonerror="true" dir=".">
-      <arg line="${thrift.args} -o ${src.dir}/gen/thrift if/serde.thrift " />
-    </exec>
-    <echo>Executing ${thrift.home}/bin/thrift to build complex.thrift test classes... </echo>
-    <exec executable="${thrift.home}/bin/thrift"  failonerror="true" dir=".">
-      <arg line="--gen java:beans -o ${src.dir}/gen/thrift if/test/complex.thrift " />
-    </exec>
-    <echo>Executing ${thrift.home}/bin/thrift to build testthrift.thrift classes... </echo>
-    <exec executable="${thrift.home}/bin/thrift"  failonerror="true" dir=".">
-      <arg line="--gen java:beans -o ${src.dir}/gen/thrift if/test/testthrift.thrift " />
-    </exec>
-    <echo>Executing ${thrift.home}/bin/thrift to build megastruct.thrift classes... </echo>
-    <exec executable="${thrift.home}/bin/thrift"  failonerror="true" dir=".">
-      <arg line="--gen java:beans -o ${src.dir}/gen/thrift if/test/megastruct.thrift " />
-    </exec>
-  </target>
-
   <target name="gen-testdata" depends="compile-test,test-jar">
     <echo message="Project: ${ant.project.name}"/>
     <echo>Generating data/files/complex.seq... </echo>

Modified: hive/branches/ptf-windowing/serde/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/ivy.xml?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/ivy.xml (original)
+++ hive/branches/ptf-windowing/serde/ivy.xml Tue Mar 19 22:37:16 2013
@@ -28,21 +28,12 @@
   <dependencies>
     <dependency org="org.apache.hive" name="hive-common" rev="${version}"
                 conf="compile->default" />
-    <dependency org="org.apache.hive" name="hive-shims" rev="${version}"
-                conf="compile->default" transitive="false"  />
     <dependency org="org.slf4j" name="slf4j-api" rev="${slf4j-api.version}"/>
     <dependency org="org.slf4j" name="slf4j-log4j12" rev="${slf4j-log4j12.version}"
                 transitive="false"/>
     <dependency org="org.mockito" name="mockito-all" rev="${mockito-all.version}"/>
-    <dependency org="org.apache.thrift" name="libthrift" rev="${libthrift.version}"
-                transitive="false"/>
     <dependency org="org.apache.thrift" name="libfb303" rev="${libfb303.version}"
                 transitive="false"/>
-    <dependency org="commons-lang" name="commons-lang" rev="${commons-lang.version}"/>
-    <dependency org="commons-logging" name="commons-logging" rev="${commons-logging.version}"
-                transitive="false"/>
-    <dependency org="commons-logging" name="commons-logging-api" rev="${commons-logging-api.version}"
-                transitive="false"/>
     <dependency org="commons-codec" name="commons-codec" rev="${commons-codec.version}"
                 transitive="false"/>
     <dependency org="org.apache.avro" name="avro" rev="${avro.version}"
@@ -51,20 +42,6 @@
                 transitive="false"/>
 
     <!-- Test Dependencies -->
-    <dependency org="org.apache.hadoop" name="hadoop-common"
-                rev="${hadoop-0.23.version}"
-                conf="hadoop23.test->default">
-      <artifact name="hadoop-common" type="tests" ext="jar" m:classifier="tests"/>
-      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
-      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
-    </dependency>
-    <dependency org="org.apache.hadoop" name="hadoop-hdfs"
-                rev="${hadoop-0.23.version}"
-                conf="hadoop23.test->default">
-      <artifact name="hadoop-hdfs" type="tests" ext="jar" m:classifier="tests"/>
-      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
-      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
-    </dependency>
     <dependency org="junit" name="junit" rev="${junit.version}" conf="test->default" />
   </dependencies>
 </ivy-module>

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazy/objectinspector/LazyObjectInspectorFactory.java Tue Mar 19 22:37:16 2013
@@ -19,8 +19,8 @@
 package org.apache.hadoop.hive.serde2.lazy.objectinspector;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Text;
@@ -28,18 +28,18 @@ import org.apache.hadoop.io.Text;
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
  * instances.
- * 
+ *
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
- * 
+ *
  * The reason of having caches here is that ObjectInspectors do not have an
  * internal state - so ObjectInspectors with the same construction parameters
  * should result in exactly the same ObjectInspector.
  */
 public final class LazyObjectInspectorFactory {
 
-  static HashMap<ArrayList<Object>, LazySimpleStructObjectInspector> cachedLazySimpleStructObjectInspector =
-      new HashMap<ArrayList<Object>, LazySimpleStructObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazySimpleStructObjectInspector> cachedLazySimpleStructObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazySimpleStructObjectInspector>();
 
   public static LazySimpleStructObjectInspector getLazySimpleStructObjectInspector(
       List<String> structFieldNames,
@@ -78,7 +78,8 @@ public final class LazyObjectInspectorFa
     return result;
   }
 
-  static HashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazySimpleListObjectInspector = new HashMap<ArrayList<Object>, LazyListObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector> cachedLazySimpleListObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyListObjectInspector>();
 
   public static LazyListObjectInspector getLazySimpleListObjectInspector(
       ObjectInspector listElementObjectInspector, byte separator,
@@ -99,7 +100,8 @@ public final class LazyObjectInspectorFa
     return result;
   }
 
-  static HashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazySimpleMapObjectInspector = new HashMap<ArrayList<Object>, LazyMapObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector> cachedLazySimpleMapObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyMapObjectInspector>();
 
   public static LazyMapObjectInspector getLazySimpleMapObjectInspector(
       ObjectInspector mapKeyObjectInspector,
@@ -125,9 +127,9 @@ public final class LazyObjectInspectorFa
     return result;
   }
 
-  static HashMap<List<Object>, LazyUnionObjectInspector>
+  static ConcurrentHashMap<List<Object>, LazyUnionObjectInspector>
     cachedLazyUnionObjectInspector =
-      new HashMap<List<Object>, LazyUnionObjectInspector>();
+      new ConcurrentHashMap<List<Object>, LazyUnionObjectInspector>();
 
   public static LazyUnionObjectInspector getLazyUnionObjectInspector(
       List<ObjectInspector> ois, byte separator, Text nullSequence,

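The factory changes above swap each static HashMap cache for a ConcurrentHashMap: these factories can be called from multiple threads, and concurrent put() calls on an unsynchronized HashMap can corrupt its internal structure, whereas ConcurrentHashMap keeps get/put safe without external locking. Because ObjectInspectors with the same construction parameters are interchangeable, a duplicate construction during a race is harmless, and putIfAbsent() can additionally keep a single canonical instance. A minimal sketch of the caching pattern, with hypothetical names rather than the factories' actual method bodies:

  import java.util.concurrent.Callable;
  import java.util.concurrent.ConcurrentHashMap;

  final class InspectorCache<K, V> {
    private final ConcurrentHashMap<K, V> cache = new ConcurrentHashMap<K, V>();

    // Look up or build; two threads may both build during a race, but
    // putIfAbsent() keeps one winner, so callers always share one instance.
    V getOrCreate(K key, Callable<V> factory) throws Exception {
      V result = cache.get(key);
      if (result == null) {
        V created = factory.call();
        V raced = cache.putIfAbsent(key, created);
        result = (raced != null) ? raced : created;
      }
      return result;
    }
  }
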
Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryObjectInspectorFactory.java?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryObjectInspectorFactory.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryObjectInspectorFactory.java Tue Mar 19 22:37:16 2013
@@ -18,18 +18,18 @@
 package org.apache.hadoop.hive.serde2.lazybinary.objectinspector;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
  * instances.
- * 
+ *
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
- * 
+ *
  * The reason of having caches here is that ObjectInspectors do not have an
  * internal state - so ObjectInspectors with the same construction parameters
  * should result in exactly the same ObjectInspector.
@@ -37,7 +37,8 @@ import org.apache.hadoop.hive.serde2.obj
 
 public final class LazyBinaryObjectInspectorFactory {
 
-  static HashMap<ArrayList<Object>, LazyBinaryStructObjectInspector> cachedLazyBinaryStructObjectInspector = new HashMap<ArrayList<Object>, LazyBinaryStructObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyBinaryStructObjectInspector> cachedLazyBinaryStructObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyBinaryStructObjectInspector>();
 
   public static LazyBinaryStructObjectInspector getLazyBinaryStructObjectInspector(
       List<String> structFieldNames,
@@ -65,7 +66,8 @@ public final class LazyBinaryObjectInspe
     return result;
   }
 
-  static HashMap<ArrayList<Object>, LazyBinaryListObjectInspector> cachedLazyBinaryListObjectInspector = new HashMap<ArrayList<Object>, LazyBinaryListObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyBinaryListObjectInspector> cachedLazyBinaryListObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyBinaryListObjectInspector>();
 
   public static LazyBinaryListObjectInspector getLazyBinaryListObjectInspector(
       ObjectInspector listElementObjectInspector) {
@@ -80,7 +82,8 @@ public final class LazyBinaryObjectInspe
     return result;
   }
 
-  static HashMap<ArrayList<Object>, LazyBinaryMapObjectInspector> cachedLazyBinaryMapObjectInspector = new HashMap<ArrayList<Object>, LazyBinaryMapObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, LazyBinaryMapObjectInspector> cachedLazyBinaryMapObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, LazyBinaryMapObjectInspector>();
 
   public static LazyBinaryMapObjectInspector getLazyBinaryMapObjectInspector(
       ObjectInspector mapKeyObjectInspector,

Modified: hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java (original)
+++ hive/branches/ptf-windowing/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorFactory.java Tue Mar 19 22:37:16 2013
@@ -24,9 +24,9 @@ import java.lang.reflect.ParameterizedTy
 import java.lang.reflect.Type;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
@@ -34,10 +34,10 @@ import org.apache.hadoop.hive.serde2.obj
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
  * instances.
- * 
+ *
  * SerDe classes should call the static functions in this library to create an
  * ObjectInspector to return to the caller of SerDe2.getObjectInspector().
- * 
+ *
  * The reason of having caches here is that ObjectInspector is because
  * ObjectInspectors do not have an internal state - so ObjectInspectors with the
  * same construction parameters should result in exactly the same
@@ -60,7 +60,7 @@ public final class ObjectInspectorFactor
     JAVA, THRIFT, PROTOCOL_BUFFERS
   };
 
-  private static HashMap<Type, ObjectInspector> objectInspectorCache = new HashMap<Type, ObjectInspector>();
+  private static ConcurrentHashMap<Type, ObjectInspector> objectInspectorCache = new ConcurrentHashMap<Type, ObjectInspector>();
 
   public static ObjectInspector getReflectionObjectInspector(Type t,
       ObjectInspectorOptions options) {
@@ -197,7 +197,8 @@ public final class ObjectInspectorFactor
     return oi;
   }
 
-  static HashMap<ObjectInspector, StandardListObjectInspector> cachedStandardListObjectInspector = new HashMap<ObjectInspector, StandardListObjectInspector>();
+  static ConcurrentHashMap<ObjectInspector, StandardListObjectInspector> cachedStandardListObjectInspector =
+      new ConcurrentHashMap<ObjectInspector, StandardListObjectInspector>();
 
   public static StandardListObjectInspector getStandardListObjectInspector(
       ObjectInspector listElementObjectInspector) {
@@ -216,7 +217,8 @@ public final class ObjectInspectorFactor
   }
 
 
-  static HashMap<List<ObjectInspector>, StandardMapObjectInspector> cachedStandardMapObjectInspector = new HashMap<List<ObjectInspector>, StandardMapObjectInspector>();
+  static ConcurrentHashMap<List<ObjectInspector>, StandardMapObjectInspector> cachedStandardMapObjectInspector =
+      new ConcurrentHashMap<List<ObjectInspector>, StandardMapObjectInspector>();
 
   public static StandardMapObjectInspector getStandardMapObjectInspector(
       ObjectInspector mapKeyObjectInspector,
@@ -242,9 +244,9 @@ public final class ObjectInspectorFactor
           mapValueObjectInspector, constantValue);
   }
 
-  static HashMap<List<ObjectInspector>, StandardUnionObjectInspector>
+  static ConcurrentHashMap<List<ObjectInspector>, StandardUnionObjectInspector>
     cachedStandardUnionObjectInspector =
-      new HashMap<List<ObjectInspector>, StandardUnionObjectInspector>();
+      new ConcurrentHashMap<List<ObjectInspector>, StandardUnionObjectInspector>();
 
   public static StandardUnionObjectInspector getStandardUnionObjectInspector(
       List<ObjectInspector> unionObjectInspectors) {
@@ -257,7 +259,8 @@ public final class ObjectInspectorFactor
     return result;
   }
 
-  static HashMap<ArrayList<List<?>>, StandardStructObjectInspector> cachedStandardStructObjectInspector = new HashMap<ArrayList<List<?>>, StandardStructObjectInspector>();
+  static ConcurrentHashMap<ArrayList<List<?>>, StandardStructObjectInspector> cachedStandardStructObjectInspector =
+      new ConcurrentHashMap<ArrayList<List<?>>, StandardStructObjectInspector>();
 
   public static StandardStructObjectInspector getStandardStructObjectInspector(
       List<String> structFieldNames,
@@ -283,7 +286,8 @@ public final class ObjectInspectorFactor
     return result;
   }
 
-  static HashMap<List<StructObjectInspector>, UnionStructObjectInspector> cachedUnionStructObjectInspector = new HashMap<List<StructObjectInspector>, UnionStructObjectInspector>();
+  static ConcurrentHashMap<List<StructObjectInspector>, UnionStructObjectInspector> cachedUnionStructObjectInspector =
+      new ConcurrentHashMap<List<StructObjectInspector>, UnionStructObjectInspector>();
 
   public static UnionStructObjectInspector getUnionStructObjectInspector(
       List<StructObjectInspector> structObjectInspectors) {
@@ -296,7 +300,8 @@ public final class ObjectInspectorFactor
     return result;
   }
 
-  static HashMap<ArrayList<Object>, ColumnarStructObjectInspector> cachedColumnarStructObjectInspector = new HashMap<ArrayList<Object>, ColumnarStructObjectInspector>();
+  static ConcurrentHashMap<ArrayList<Object>, ColumnarStructObjectInspector> cachedColumnarStructObjectInspector =
+      new ConcurrentHashMap<ArrayList<Object>, ColumnarStructObjectInspector>();
 
   public static ColumnarStructObjectInspector getColumnarStructObjectInspector(
       List<String> structFieldNames,

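Aside: every cache touched in the ObjectInspectorFactory hunks above follows the same get-or-create shape, and the patch simply swaps the backing map for a ConcurrentHashMap so concurrent lookups cannot observe a corrupted table. A minimal sketch of that shape (InspectorCache and CachedInspector are hypothetical names, not from the patch; the patch keeps the plain get/put, while this sketch also uses putIfAbsent to close the small duplicate-creation race, which is benign here anyway because inspectors built from the same parameters carry no mutable state):

    import java.util.concurrent.ConcurrentHashMap;

    final class InspectorCache {
      // Thread-safe replacement for the old HashMap caches: concurrent
      // readers can no longer see the map mid-resize.
      private static final ConcurrentHashMap<String, CachedInspector> CACHE =
          new ConcurrentHashMap<String, CachedInspector>();

      static CachedInspector get(String key) {
        CachedInspector result = CACHE.get(key);
        if (result == null) {
          CachedInspector candidate = new CachedInspector(key);
          // putIfAbsent resolves the race between two creating threads;
          // whichever instance lands in the map is the one everyone shares.
          CachedInspector previous = CACHE.putIfAbsent(key, candidate);
          result = (previous != null) ? previous : candidate;
        }
        return result;
      }
    }

    final class CachedInspector {
      final String key;
      CachedInspector(String key) { this.key = key; }
    }
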
Modified: hive/branches/ptf-windowing/service/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/service/build.xml?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/service/build.xml (original)
+++ hive/branches/ptf-windowing/service/build.xml Tue Mar 19 22:37:16 2013
@@ -22,15 +22,6 @@
 
   <import file="../build-common.xml"/>
 
-  <target name="thriftif" depends="check-thrift-home">
-    <echo message="Project: ${ant.project.name}"/>
-    <fail unless="thrift.home">You must set the 'thrift.home' property!</fail>
-    <echo>Executing ${thrift.home}/bin/thrift on ${ant.project.name}/if/hive_service.thrift</echo>
-    <exec executable="${thrift.home}/bin/thrift"  failonerror="true" dir=".">
-      <arg line="${thrift.args} -I ${basedir}/include -I ${basedir}/.. -o ${src.dir}/gen/thrift if/hive_service.thrift " />
-    </exec>
-  </target>
-
   <target name="compile" depends="init,ivy-retrieve">
     <echo message="Project: ${ant.project.name}"/>
     <javac

Modified: hive/branches/ptf-windowing/service/ivy.xml
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/service/ivy.xml?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/service/ivy.xml (original)
+++ hive/branches/ptf-windowing/service/ivy.xml Tue Mar 19 22:37:16 2013
@@ -29,16 +29,7 @@
   <dependencies>
     <dependency org="org.apache.hive" name="hive-exec" rev="${version}"
                 conf="compile->default" />
-    <dependency org="org.apache.thrift" name="libthrift" rev="${libthrift.version}"
-                transitive="false"/>
-    <dependency org="org.apache.thrift" name="libfb303" rev="${libfb303.version}"
-                transitive="false"/>
-    <dependency org="commons-logging" name="commons-logging" rev="${commons-logging.version}"
-                transitive="false"/>
-    <dependency org="commons-logging" name="commons-logging-api" rev="${commons-logging-api.version}"
-                transitive="false"/>
 
     <!-- Test Dependencies -->
-    <dependency org="junit" name="junit" rev="${junit.version}" conf="test->default" />
   </dependencies>
 </ivy-module>

Modified: hive/branches/ptf-windowing/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/branches/ptf-windowing/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Tue Mar 19 22:37:16 2013
@@ -540,10 +540,16 @@ public class Hadoop20Shims implements Ha
   }
 
   @Override
-  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws
+  public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
+    throws IOException {
+    throw new UnsupportedOperationException("Tokens are not supported in current hadoop version");
+  }
+
+  @Override
+  public <T> T doAs(UserGroupInformation ugi, PrivilegedExceptionAction<T> pvea) throws
     IOException, InterruptedException {
     try {
-      Subject.doAs(SecurityUtil.getSubject(ugi),pvea);
+      return Subject.doAs(SecurityUtil.getSubject(ugi),pvea);
     } catch (PrivilegedActionException e) {
       throw new IOException(e);
     }
@@ -555,6 +561,21 @@ public class Hadoop20Shims implements Ha
   }
 
   @Override
+  public void loginUserFromKeytab(String principal, String keytabFile) throws IOException {
+    throw new UnsupportedOperationException("Kerberos login is not supported in current hadoop version");
+  }
+
+  @Override
+  public UserGroupInformation createProxyUser(String userName) throws IOException {
+    return createRemoteUser(userName, null);
+  }
+
+  @Override
+  public boolean isSecurityEnabled() {
+    return false;
+  }
+
+  @Override
   public String getTaskAttemptLogUrl(JobConf conf,
     String taskTrackerHttpAddress, String taskAttemptId)
     throws MalformedURLException {

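Note on the Hadoop20Shims hunk above: widening doAs from PrivilegedExceptionAction<Void> to a generic <T> lets callers get a value back out of the privileged block instead of smuggling it through a side channel. A hedged caller-side sketch against the interface as modified in this commit (DoAsExample and currentUserName are illustrative, not in the patch):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.security.UserGroupInformation;

    final class DoAsExample {
      // Runs the action as the given user and returns its String result,
      // something the old void-only doAs could not express directly.
      static String currentUserName(HadoopShims shims, UserGroupInformation ugi)
          throws IOException, InterruptedException {
        return shims.doAs(ugi, new PrivilegedExceptionAction<String>() {
          public String run() throws Exception {
            return UserGroupInformation.getCurrentUser().getUserName();
          }
        });
      }
    }
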
Modified: hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Tue Mar 19 22:37:16 2013
@@ -39,6 +39,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
+import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.io.Text;
@@ -59,6 +60,7 @@ import org.apache.hadoop.mapred.TaskID;
 import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
 import org.apache.hadoop.mapred.lib.CombineFileSplit;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -463,19 +465,19 @@ public abstract class HadoopShimsSecure 
 
     return ToolRunner.run(har, args.toArray(new String[0]));
   }
-  
+
   /*
    * This particular instance is for Hadoop 1.0 which creates an archive
    * with only the relative path of the archived directory stored within
    * the archive as compared to the full path in case of earlier versions.
    * See this api in Hadoop20Shims for comparison.
    */
-  public URI getHarUri(URI original, URI base, URI originalBase) 
+  public URI getHarUri(URI original, URI base, URI originalBase)
     throws URISyntaxException {
     URI relative = originalBase.relativize(original);
     if (relative.isAbsolute()) {
       throw new URISyntaxException("Couldn't create URI for location.",
-                                   "Relative: " + relative + " Base: " 
+                                   "Relative: " + relative + " Base: "
                                    + base + " OriginalBase: " + originalBase);
     }
 
@@ -538,8 +540,27 @@ public abstract class HadoopShimsSecure 
   }
 
   @Override
-  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws IOException, InterruptedException {
-    ugi.doAs(pvea);
+  public void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService) throws IOException {
+    Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
+    delegationToken.decodeFromUrlString(tokenStr);
+    delegationToken.setService(new Text(tokenService));
+    ugi.addToken(delegationToken);
+  }
+
+  @Override
+  public <T> T doAs(UserGroupInformation ugi, PrivilegedExceptionAction<T> pvea) throws IOException, InterruptedException {
+    return ugi.doAs(pvea);
+  }
+
+  @Override
+  public UserGroupInformation createProxyUser(String userName) throws IOException {
+    return UserGroupInformation.createProxyUser(
+        userName, UserGroupInformation.getLoginUser());
+  }
+
+  @Override
+  public boolean isSecurityEnabled() {
+    return UserGroupInformation.isSecurityEnabled();
   }
 
   @Override
@@ -557,6 +578,12 @@ public abstract class HadoopShimsSecure 
   }
 
   @Override
+  public void loginUserFromKeytab(String principal, String keytabFile) throws IOException {
+    String hostPrincipal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
+    UserGroupInformation.loginUserFromKeytab(hostPrincipal, keytabFile);
+  }
+
+  @Override
   abstract public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception;
 
   @Override

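Taken together, the HadoopShimsSecure additions above (setTokenStr, createProxyUser, the generic doAs, loginUserFromKeytab) form a complete impersonation path for a service front-end. A sketch of how they compose, assuming a delegation token string already obtained elsewhere; ImpersonationSketch, clientName, tokenStr, and the "hiveMetastoreToken" service label are all illustrative:

    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.security.UserGroupInformation;

    final class ImpersonationSketch {
      static void runAsClient(String clientName, String tokenStr) throws Exception {
        HadoopShims shims = ShimLoader.getHadoopShims();
        // The proxy UGI rides on top of the service's own (keytab) login.
        UserGroupInformation clientUgi = shims.createProxyUser(clientName);
        // Attach the delegation token so downstream RPCs authenticate
        // as the client rather than as the service principal.
        shims.setTokenStr(clientUgi, tokenStr, "hiveMetastoreToken");
        shims.doAs(clientUgi, new PrivilegedExceptionAction<Void>() {
          public Void run() throws Exception {
            // ... issue metastore / filesystem calls as clientName here ...
            return null;
          }
        });
      }
    }
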
Modified: hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java (original)
+++ hive/branches/ptf-windowing/shims/src/common-secure/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java Tue Mar 19 22:37:16 2013
@@ -40,6 +40,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Client;
 import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -62,6 +64,7 @@ import org.apache.thrift.transport.TTran
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION;
 
  /**
   * Functions that bridge Thrift's SASL transports to Hadoop's
@@ -76,6 +79,14 @@ import org.apache.thrift.transport.TTran
    }
 
    @Override
+   public Client createClientWithConf(String authType) {
+     Configuration conf = new Configuration();
+     conf.set(HADOOP_SECURITY_AUTHENTICATION, authType);
+     UserGroupInformation.setConfiguration(conf);
+     return new Client();
+   }
+
+   @Override
    public Server createServer(String keytabFile, String principalConf) throws TTransportException {
      return new Server(keytabFile, principalConf);
    }
@@ -233,7 +244,7 @@ import org.apache.thrift.transport.TTran
      /**
       * Create a server with a kerberos keytab/principal.
       */
-     private Server(String keytabFile, String principalConf)
+     protected Server(String keytabFile, String principalConf)
        throws TTransportException {
        if (keytabFile == null || keytabFile.isEmpty()) {
          throw new TTransportException("No keytab specified");
@@ -293,7 +304,15 @@ import org.apache.thrift.transport.TTran
       */
      @Override
      public TProcessor wrapProcessor(TProcessor processor) {
-      return new TUGIAssumingProcessor(processor, secretManager);
+       return new TUGIAssumingProcessor(processor, secretManager, true);
+     }
+
+     /**
+      * Wrap a TProcessor to capture client information (connecting user id, IP address) without impersonating the caller
+      */
+     @Override
+     public TProcessor wrapNonAssumingProcessor(TProcessor processor) {
+      return new TUGIAssumingProcessor(processor, secretManager, false);
      }
 
     protected DelegationTokenStore getTokenStore(Configuration conf)
@@ -398,6 +417,18 @@ import org.apache.thrift.transport.TTran
        }
      };
 
+     private static ThreadLocal<String> remoteUser = new ThreadLocal<String> () {
+       @Override
+       protected synchronized String initialValue() {
+         return null;
+       }
+     };
+
+     @Override
+     public String getRemoteUser() {
+       return remoteUser.get();
+     }
+
     /** CallbackHandler for SASL DIGEST-MD5 mechanism */
     // This code is pretty much completely based on Hadoop's
     // SaslRpcServer.SaslDigestCallbackHandler - the only reason we could not
@@ -479,12 +510,15 @@ import org.apache.thrift.transport.TTran
       *
       * This is used on the server side to set the UGI for each specific call.
       */
-     private class TUGIAssumingProcessor implements TProcessor {
+     protected class TUGIAssumingProcessor implements TProcessor {
        final TProcessor wrapped;
        DelegationTokenSecretManager secretManager;
-       TUGIAssumingProcessor(TProcessor wrapped, DelegationTokenSecretManager secretManager) {
+       boolean useProxy;
+       TUGIAssumingProcessor(TProcessor wrapped, DelegationTokenSecretManager secretManager,
+           boolean useProxy) {
          this.wrapped = wrapped;
          this.secretManager = secretManager;
+         this.useProxy = useProxy;
        }
 
        public boolean process(final TProtocol inProt, final TProtocol outProt) throws TException {
@@ -513,17 +547,23 @@ import org.apache.thrift.transport.TTran
          remoteAddress.set(socket.getInetAddress());
          UserGroupInformation clientUgi = null;
          try {
-           clientUgi = UserGroupInformation.createProxyUser(
-              endUser, UserGroupInformation.getLoginUser());
-           return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
-               public Boolean run() {
-                 try {
-                   return wrapped.process(inProt, outProt);
-                 } catch (TException te) {
-                   throw new RuntimeException(te);
+           if (useProxy) {
+             clientUgi = UserGroupInformation.createProxyUser(
+               endUser, UserGroupInformation.getLoginUser());
+             remoteUser.set(clientUgi.getShortUserName());
+             return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
+                 public Boolean run() {
+                   try {
+                     return wrapped.process(inProt, outProt);
+                   } catch (TException te) {
+                     throw new RuntimeException(te);
+                   }
                  }
-               }
-             });
+               });
+           } else {
+             remoteUser.set(endUser);
+             return wrapped.process(inProt, outProt);
+           }
          } catch (RuntimeException rte) {
            if (rte.getCause() instanceof TException) {
              throw (TException)rte.getCause();

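The upshot of the HadoopThriftAuthBridge20S changes above: wrapProcessor keeps the old behavior (each call doAs()-ed as the authenticated end user), while the new wrapNonAssumingProcessor only records the caller's identity, retrievable per call through getRemoteUser() via the ThreadLocal added in this hunk. A hedged wiring sketch (SaslServerWiring, baseProcessor, and useProxy are placeholders):

    import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
    import org.apache.thrift.TProcessor;

    final class SaslServerWiring {
      static TProcessor wrap(HadoopThriftAuthBridge.Server saslServer,
          TProcessor baseProcessor, boolean useProxy) {
        // useProxy == true : each call runs inside clientUgi.doAs(...)
        // useProxy == false: calls run as the service user, but
        //   saslServer.getRemoteUser() still reports who is on the wire.
        return useProxy
            ? saslServer.wrapProcessor(baseProcessor)
            : saslServer.wrapNonAssumingProcessor(baseProcessor);
      }
    }
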
Modified: hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Tue Mar 19 22:37:16 2013
@@ -192,15 +192,15 @@ public interface HadoopShims {
   public void closeAllForUGI(UserGroupInformation ugi);
 
   public UserGroupInformation getUGIForConf(Configuration conf) throws LoginException, IOException;
-
   /**
    * Used by metastore server to perform requested rpc in client context.
+   * @param <T> the result type of the action
    * @param ugi
    * @param pvea
    * @throws IOException
    * @throws InterruptedException
    */
-  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws
+  public <T> T doAs(UserGroupInformation ugi, PrivilegedExceptionAction<T> pvea) throws
     IOException, InterruptedException;
 
   /**
@@ -226,6 +226,12 @@ public interface HadoopShims {
   public boolean isSecureShimImpl();
 
   /**
+   * Return true if the hadoop configuration has security enabled.
+   * @return true if Hadoop security is enabled, false otherwise
+   */
+  public boolean isSecurityEnabled();
+
+  /**
    * Get the string form of the token given a token signature.
    * The signature is used as the value of the "service" field in the token for lookup.
    * Ref: AbstractDelegationTokenSelector in Hadoop. If there exists such a token
@@ -242,6 +248,16 @@ public interface HadoopShims {
    */
   String getTokenStrForm(String tokenSignature) throws IOException;
 
+  /**
+   * Add a delegation token to the given ugi.
+   * @param ugi the user to receive the token
+   * @param tokenStr the token in its URL-safe string form
+   * @param tokenService the service name to set on the token
+   * @throws IOException
+   */
+  void setTokenStr(UserGroupInformation ugi, String tokenStr, String tokenService)
+    throws IOException;
+
 
   enum JobTrackerState { INITIALIZING, RUNNING };
 
@@ -290,6 +306,12 @@ public interface HadoopShims {
   public String getJobLauncherHttpAddress(Configuration conf);
 
 
+  /**
+   * Perform kerberos login using the given principal and keytab.
+   * @throws IOException
+   */
+  public void loginUserFromKeytab(String principal, String keytabFile) throws IOException;
+
   /**
    * Move the directory/file to trash. In case of the symlinks or mount points, the file is
    * moved to the trashbin in the actual volume of the path p being deleted
@@ -321,6 +343,13 @@ public interface HadoopShims {
   public short getDefaultReplication(FileSystem fs, Path path);
 
   /**
+   * Create the proxy ugi for the given userid.
+   * @param userName the user to impersonate
+   * @return a proxy UGI that runs as userName on top of the login user
+   */
+  UserGroupInformation createProxyUser(String userName) throws IOException;
+
+  /**
    * InputSplitShim.
    *
    */
@@ -380,4 +409,5 @@ public interface HadoopShims {
     RecordReader getRecordReader(JobConf job, InputSplitShim split, Reporter reporter,
         Class<RecordReader<K, V>> rrClass) throws IOException;
   }
+
 }

Modified: hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java (original)
+++ hive/branches/ptf-windowing/shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java Tue Mar 19 22:37:16 2013
@@ -37,6 +37,11 @@ import org.apache.thrift.transport.TTran
        "The current version of Hadoop does not support Authentication");
    }
 
+   public Client createClientWithConf(String authType) {
+     throw new UnsupportedOperationException(
+       "The current version of Hadoop does not support Authentication");
+   }
+
    public Server createServer(String keytabFile, String principalConf)
      throws TTransportException {
      throw new UnsupportedOperationException(
@@ -67,7 +72,9 @@ import org.apache.thrift.transport.TTran
    public static abstract class Server {
      public abstract TTransportFactory createTransportFactory() throws TTransportException;
      public abstract TProcessor wrapProcessor(TProcessor processor);
+     public abstract TProcessor wrapNonAssumingProcessor(TProcessor processor);
      public abstract InetAddress getRemoteAddress();
+     public abstract String getRemoteUser();
      public abstract void startDelegationTokenSecretManager(Configuration conf) throws IOException;
      public abstract String getDelegationToken(String owner, String renewer) 
      throws IOException, InterruptedException;

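The base class above supplies the default (unsupported) behavior that the secure 20S bridge overrides. For completeness, a caller-side sketch of the new createClientWithConf, which pins UserGroupInformation's static configuration to the requested auth type before constructing the client; the "kerberos" literal is one of Hadoop's standard hadoop.security.authentication values, and BridgeClientSketch is an illustrative name:

    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;

    final class BridgeClientSketch {
      static HadoopThriftAuthBridge.Client kerberosClient() {
        // Forces hadoop.security.authentication=kerberos in UGI's static
        // config first, so a client JVM whose core-site.xml says "simple"
        // can still open a SASL connection.
        return ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
      }
    }
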
Modified: hive/branches/ptf-windowing/testutils/ptest/hivetest.py
URL: http://svn.apache.org/viewvc/hive/branches/ptf-windowing/testutils/ptest/hivetest.py?rev=1458549&r1=1458548&r2=1458549&view=diff
==============================================================================
--- hive/branches/ptf-windowing/testutils/ptest/hivetest.py (original)
+++ hive/branches/ptf-windowing/testutils/ptest/hivetest.py Tue Mar 19 22:37:16 2013
@@ -342,6 +342,8 @@ def run_other_tests():
               'sed -e "s:[^/]*/::g"',
               'grep -v TestSerDe.class',
               'grep -v TestHiveMetaStore.class',
+              'grep -v TestBeeLineDriver.class',
+              'grep -v TestHiveServer2Concurrency.class',
               'grep -v TestCliDriver.class',
               'grep -v TestNegativeCliDriver.class',
               'grep -v ".*\$.*\.class"',
@@ -354,6 +356,8 @@ def run_other_tests():
               'sed -e "s:[^/]*/::g"',
               'grep -v TestSerDe.class',
               'grep -v TestHiveMetaStore.class',
+              'grep -v TestBeeLineDriver.class',
+              'grep -v TestHiveServer2Concurrency.class',
               'grep -v TestCliDriver.class',
               'grep -v TestNegativeCliDriver.class',
               'grep -v ".*\$.*\.class"',