Posted to commits@hive.apache.org by br...@apache.org on 2014/09/08 06:38:26 UTC

svn commit: r1623263 [27/28] - in /hive/branches/spark: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/ ant/src/org/apache/hadoop/hive/ant/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/ bin/...

Modified: hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/infer_bucket_sort_grouping_operators.q.out Mon Sep  8 04:38:17 2014
@@ -51,12 +51,12 @@ STAGE PLANS:
                 keys: key (type: string), value (type: string), '0' (type: string)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 17436 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
                   sort order: +++
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
-                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 17436 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
@@ -64,14 +64,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 43 Data size: 8617 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint)
             outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 43 Data size: 8617 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 43 Data size: 8617 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -238,12 +238,12 @@ STAGE PLANS:
                 keys: key (type: string), value (type: string), '0' (type: string)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
                   sort order: +++
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
-                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
@@ -251,14 +251,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint)
             outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -425,12 +425,12 @@ STAGE PLANS:
                 keys: key (type: string), value (type: string), '0' (type: string)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
                   sort order: +++
                   Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
-                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col3 (type: bigint)
       Reduce Operator Tree:
         Group By Operator
@@ -438,14 +438,14 @@ STAGE PLANS:
           keys: KEY._col0 (type: string), KEY._col1 (type: string), KEY._col2 (type: string)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col0 (type: string), _col1 (type: string), _col3 (type: bigint)
             outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/optimize_nullscan.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/optimize_nullscan.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/optimize_nullscan.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/optimize_nullscan.q.out Mon Sep  8 04:38:17 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/show_conf.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/show_conf.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/show_conf.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/show_conf.q.out Mon Sep  8 04:38:17 2014
@@ -7,4 +7,4 @@ PREHOOK: query: show conf "hive.stats.re
 PREHOOK: type: SHOWCONF
 POSTHOOK: query: show conf "hive.stats.retries.wait"
 POSTHOOK: type: SHOWCONF
-3000	INT	The base waiting window (in milliseconds) before the next retry. The actual wait time is calculated by baseWindow * failures baseWindow * (failure  1) * (random number between [0.0,1.0]).
+3000ms	STRING(TIME)	Expects a time value with unit (d/day, h/hour, m/min, s/sec, ms/msec, us/usec, ns/nsec), which is msec if not specified. The base waiting window before the next retry. The actual wait time is calculated by baseWindow * failures baseWindow * (failure + 1) * (random number between [0.0,1.0]).

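The show_conf change above reflects hive.stats.retries.wait becoming a time-typed setting: the value now carries a unit suffix (d/day, h/hour, m/min, s/sec, ms/msec, us/usec, ns/nsec) and defaults to milliseconds when no unit is given. Below is a toy parser for that value format, using only the JDK; it is not Hive's own parser and the class name is made up.

import java.util.concurrent.TimeUnit;

public class TimeValueExample {
  // Toy parser for "<number><unit>" values such as "3000ms" or "5s".
  // Bare numbers default to milliseconds, matching the description above.
  static long toMillis(String value) {
    String v = value.trim().toLowerCase();
    if (v.endsWith("ms")) {
      return Long.parseLong(v.substring(0, v.length() - 2));
    } else if (v.endsWith("s")) {
      return TimeUnit.SECONDS.toMillis(Long.parseLong(v.substring(0, v.length() - 1)));
    }
    return Long.parseLong(v);
  }

  public static void main(String[] args) {
    System.out.println(toMillis("3000ms")); // 3000
    System.out.println(toMillis("5s"));     // 5000
    System.out.println(toMillis("3000"));   // 3000
  }
}
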
Modified: hive/branches/spark/ql/src/test/results/clientpositive/show_tables.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/show_tables.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/show_tables.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/show_tables.q.out Mon Sep  8 04:38:17 2014
@@ -170,6 +170,166 @@ PREHOOK: query: SHOW TABLES IN test_db L
 PREHOOK: type: SHOWTABLES
 POSTHOOK: query: SHOW TABLES IN test_db LIKE "nomatch"
 POSTHOOK: type: SHOWTABLES
+PREHOOK: query: -- SHOW TABLE EXTENDED basic syntax tests and wildcard
+SHOW TABLE EXTENDED IN test_db LIKE foo
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: -- SHOW TABLE EXTENDED basic syntax tests and wildcard
+SHOW TABLE EXTENDED IN test_db LIKE foo
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:foo
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+PREHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE "foo"
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE "foo"
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:foo
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+PREHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE 'foo'
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE 'foo'
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:foo
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+PREHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE `foo`
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE `foo`
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:foo
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+PREHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE 'ba*'
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE 'ba*'
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:bar
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+tableName:baz
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+PREHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE "ba*"
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE "ba*"
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:bar
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+tableName:baz
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+PREHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE `ba*`
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: SHOW TABLE EXTENDED IN test_db LIKE `ba*`
+POSTHOOK: type: SHOW_TABLESTATUS
+tableName:bar
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
+tableName:baz
+#### A masked pattern was here ####
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { i32 a}
+partitioned:false
+partitionColumns:
+totalNumberFiles:0
+totalFileSize:0
+maxFileSize:0
+minFileSize:0
+#### A masked pattern was here ####
+
 PREHOOK: query: -- SHOW TABLES from a database with a name that requires escaping
 CREATE DATABASE `database`
 PREHOOK: type: CREATEDATABASE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket2.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket2.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket2.q.out Mon Sep  8 04:38:17 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket3.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket3.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket3.q.out Mon Sep  8 04:38:17 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket4.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket4.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/tez/bucket4.q.out Mon Sep  8 04:38:17 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out Mon Sep  8 04:38:17 2014
@@ -83,7 +83,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Map Join MAPJOIN[16][bigTable=a] in task 'Map 3' is a cross product
+Warning: Map Join MAPJOIN[18][bigTable=a] in task 'Map 3' is a cross product
 PREHOOK: query: explain select * from B d1 join B d2 on d1.key = d2.key join A
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select * from B d1 join B d2 on d1.key = d2.key join A
@@ -171,7 +171,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Map Join MAPJOIN[23][bigTable=a] in task 'Map 4' is a cross product
+Warning: Map Join MAPJOIN[25][bigTable=a] in task 'Map 4' is a cross product
 PREHOOK: query: explain select * from A join 
          (select d1.key 
           from B d1 join B d2 on d1.key = d2.key 
@@ -396,7 +396,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Map Join MAPJOIN[28][bigTable=?] in task 'Reducer 5' is a cross product
+Warning: Map Join MAPJOIN[30][bigTable=?] in task 'Reducer 5' is a cross product
 PREHOOK: query: explain select * from 
 (select A.key from A group by key) ss join 
 (select d1.key from B d1 join B d2 on d1.key = d2.key where 1 = 1 group by d1.key) od1

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/metadataonly1.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/tez/metadataonly1.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/tez/metadataonly1.q.out Mon Sep  8 04:38:17 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out Mon Sep  8 04:38:17 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/temp_table.q.out Mon Sep  8 04:38:17 2014
@@ -168,14 +168,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: foo
-          Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: key (type: string), value (type: string)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE
             Limit
               Number of rows: 10
-              Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
               ListSink
 
 PREHOOK: query: select * from foo limit 10

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_string_concat.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_string_concat.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_string_concat.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/vector_string_concat.q.out Mon Sep  8 04:38:17 2014
@@ -117,14 +117,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: over1korc
-          Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: s (type: string), concat(concat('      ', s), '      ') (type: string), concat(concat('|', rtrim(concat(concat('      ', s), '      '))), '|') (type: string)
             outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
             Limit
               Number of rows: 20
-              Statistics: Num rows: 20 Data size: 5920 Basic stats: COMPLETE Column stats: NONE
               ListSink
 
 PREHOOK: query: SELECT s AS `string`,

Modified: hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out Mon Sep  8 04:38:17 2014
@@ -924,14 +924,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: alltypesorc
-          Statistics: Num rows: 1193 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
             predicate: (((((cstring1 rlike 'a.*') and (cstring2 like '%ss%')) or ((1 <> cboolean2) and ((csmallint < 79.553) and (-257 <> ctinyint)))) or ((cdouble > ctinyint) and (cfloat >= cint))) or ((cint < cbigint) and (ctinyint > cbigint))) (type: boolean)
-            Statistics: Num rows: 959 Data size: 303244 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: cint (type: int), cdouble (type: double), ctimestamp2 (type: timestamp), cstring1 (type: string), cboolean2 (type: boolean), ctinyint (type: tinyint), cfloat (type: float), ctimestamp1 (type: timestamp), csmallint (type: smallint), cbigint (type: bigint), (-3728 * cbigint) (type: bigint), (- cint) (type: int), (-863.257 - cint) (type: double), (- csmallint) (type: smallint), (csmallint - (- csmallint)) (type: smallint), ((csmallint - (- csmallint)) + (- csmallint)) (type: smallint), (cint / cint) (type: double), ((-863.257 - cint) - -26.28) (type: double), (- cfloat) (type: float), (cdouble * -89010) (type: double), (ctinyint / 988888) (type: double), (- ctinyint) (type: tinyint), (79.553 / ctinyint) (type: double)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22
-              Statistics: Num rows: 959 Data size: 303244 Basic stats: COMPLETE Column stats: NONE
               ListSink
 
 PREHOOK: query: SELECT cint,
@@ -2317,14 +2314,11 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: alltypesorc
-          Statistics: Num rows: 1347 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
             predicate: (((((197.0 > ctinyint) and (cint = cbigint)) or (cbigint = 359)) or (cboolean1 < 0)) or ((cstring1 like '%ss') and (cfloat <= ctinyint))) (type: boolean)
-            Statistics: Num rows: 1347 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: cint (type: int), cbigint (type: bigint), cstring1 (type: string), cboolean1 (type: boolean), cfloat (type: float), cdouble (type: double), ctimestamp2 (type: timestamp), csmallint (type: smallint), cstring2 (type: string), cboolean2 (type: boolean), (cint / cbigint) (type: double), (cbigint % 79.553) (type: double), (- (cint / cbigint)) (type: double), (10.175 % cfloat) (type: double), (- cfloat) (type: float), (cfloat - (- cfloat)) (type: float), ((cfloat - (- cfloat)) % -6432) (type: float), (cdouble * csmallint) (type: double), (- cdouble) (type: double), (- cbigint) (type: bigint), (cfloat - (cint / cbigint)) (type: double), (- csmallint) (type: smallint), (3569 % cbigint) (type: bigint), (359 - cdouble) (type: double), (- csmallint) (type: smallint)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24
-              Statistics: Num rows: 1347 Data size: 377237 Basic stats: COMPLETE Column stats: NONE
               ListSink
 
 PREHOOK: query: SELECT cint,

Modified: hive/branches/spark/ql/src/test/results/clientpositive/union20.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/union20.q.out?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/union20.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/union20.q.out Mon Sep  8 04:38:17 2014
@@ -132,14 +132,14 @@ STAGE PLANS:
             0 {KEY.reducesinkkey0} {VALUE._col0}
             1 {KEY.reducesinkkey0} {VALUE._col0}
           outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 6 Data size: 1632 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 36 Data size: 9792 Basic stats: COMPLETE Column stats: PARTIAL
           Select Operator
             expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 6 Data size: 1632 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 36 Data size: 9792 Basic stats: COMPLETE Column stats: PARTIAL
             File Output Operator
               compressed: false
-              Statistics: Num rows: 6 Data size: 1632 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 36 Data size: 9792 Basic stats: COMPLETE Column stats: PARTIAL
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyBinary.java Mon Sep  8 04:38:17 2014
@@ -55,6 +55,8 @@ public class LazyBinary extends LazyPrim
                 "decoded the data.");
     }
     byte[] decoded = arrayByteBase64 ? Base64.decodeBase64(recv) : recv;
+    // use the original bytes in case decoding should fail
+    decoded = decoded.length > 0 ? decoded : recv;
     data.set(decoded, 0, decoded.length);
   }
 

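The LazyBinary change above falls back to the raw bytes when Base64 decoding produces nothing, for instance a field containing only a newline, as exercised by the TestLazyPrimitive update further down. A minimal standalone sketch of the same guard, assuming commons-codec on the classpath; the class and method names here are illustrative, not Hive's.

import org.apache.commons.codec.binary.Base64;

public class Base64FallbackExample {
  // Decode a possibly Base64-encoded field, but keep the raw bytes
  // when decoding yields an empty result (decodeBase64 silently skips
  // bytes outside the Base64 alphabet, so "\n" decodes to nothing).
  static byte[] decodeOrFallback(byte[] recv, boolean assumeBase64) {
    byte[] decoded = assumeBase64 ? Base64.decodeBase64(recv) : recv;
    return decoded.length > 0 ? decoded : recv;
  }

  public static void main(String[] args) {
    byte[] newline = {'\n'};
    System.out.println(decodeOrFallback(newline, true).length); // 1, not 0
  }
}
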
Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyStruct.java Mon Sep  8 04:38:17 2014
@@ -21,6 +21,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import com.google.common.primitives.Bytes;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -28,6 +29,7 @@ import org.apache.hadoop.hive.serde2.Ser
 import org.apache.hadoop.hive.serde2.StructObject;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -285,4 +287,59 @@ public class LazyStruct extends LazyNonP
   public long getRawDataSerializedSize() {
     return serializedSize;
   }
+
+  // parse the struct using multi-char delimiter
+  public void parseMultiDelimit(byte[] rawRow, byte[] fieldDelimit) {
+    if (rawRow == null || fieldDelimit == null) {
+      return;
+    }
+    if (fields == null) {
+      List<? extends StructField> fieldRefs = ((StructObjectInspector) oi).getAllStructFieldRefs();
+      fields = new LazyObject[fieldRefs.size()];
+      for (int i = 0; i < fields.length; i++) {
+        fields[i] = LazyFactory.createLazyObject(fieldRefs.get(i).getFieldObjectInspector());
+      }
+      fieldInited = new boolean[fields.length];
+      startPosition = new int[fields.length + 1];
+    }
+    // the indexes of the delimiters
+    int[] delimitIndexes = findIndexes(rawRow, fieldDelimit);
+    int diff = fieldDelimit.length - 1;
+    // first field always starts from 0, even when missing
+    startPosition[0] = 0;
+    for (int i = 1; i < fields.length; i++) {
+      if (delimitIndexes[i - 1] != -1) {
+        int start = delimitIndexes[i - 1] + fieldDelimit.length;
+        startPosition[i] = start - i * diff;
+      } else {
+        startPosition[i] = length + 1;
+      }
+    }
+    startPosition[fields.length] = length + 1;
+    Arrays.fill(fieldInited, false);
+    parsed = true;
+  }
+
+  // find all the indexes of the sub byte[]
+  private int[] findIndexes(byte[] array, byte[] target) {
+    if (fields.length <= 1) {
+      return new int[0];
+    }
+    int[] indexes = new int[fields.length - 1];
+    Arrays.fill(indexes, -1);
+    indexes[0] = Bytes.indexOf(array, target);
+    if (indexes[0] == -1) {
+      return indexes;
+    }
+    int indexInNewArray = indexes[0];
+    for (int i = 1; i < indexes.length; i++) {
+      array = Arrays.copyOfRange(array, indexInNewArray + target.length, array.length);
+      indexInNewArray = Bytes.indexOf(array, target);
+      if (indexInNewArray == -1) {
+        break;
+      }
+      indexes[i] = indexInNewArray + indexes[i - 1] + target.length;
+    }
+    return indexes;
+  }
 }

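The parseMultiDelimit/findIndexes addition above locates every occurrence of a multi-byte field delimiter and then derives per-field start offsets from those positions. The sketch below shows the same index scan in a self-contained form, written slightly differently for clarity and assuming Guava on the classpath; the class name is made up.

import java.util.Arrays;
import com.google.common.primitives.Bytes;

public class MultiDelimitScanExample {
  // Return the offset of each occurrence of 'target' in 'array',
  // padded with -1 when fewer than 'expected' delimiters are found.
  static int[] findIndexes(byte[] array, byte[] target, int expected) {
    int[] indexes = new int[expected];
    Arrays.fill(indexes, -1);
    int from = 0;
    for (int i = 0; i < expected; i++) {
      int idx = Bytes.indexOf(Arrays.copyOfRange(array, from, array.length), target);
      if (idx == -1) {
        break;
      }
      indexes[i] = from + idx;
      from = indexes[i] + target.length;
    }
    return indexes;
  }

  public static void main(String[] args) {
    byte[] row = "a|+|b|+|c".getBytes();
    byte[] delim = "|+|".getBytes();
    System.out.println(Arrays.toString(findIndexes(row, delim, 2))); // [1, 5]
  }
}

Field i then begins just after delimiter i-1, which is what parseMultiDelimit records in startPosition.
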
Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java Mon Sep  8 04:38:17 2014
@@ -69,9 +69,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.StringUtils;
@@ -108,7 +106,7 @@ public final class ObjectInspectorUtils 
       PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
       if (!(poi instanceof AbstractPrimitiveWritableObjectInspector)) {
         return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-            (PrimitiveTypeInfo)poi.getTypeInfo());
+            poi.getTypeInfo());
       }
     }
     return oi;
@@ -292,24 +290,21 @@ public final class ObjectInspectorUtils 
     switch (oi.getCategory()) {
     case PRIMITIVE: {
       PrimitiveObjectInspector loi = (PrimitiveObjectInspector) oi;
-      switch (objectInspectorOption) {
-      case DEFAULT: {
-        if (loi.preferWritable()) {
-          result = loi.getPrimitiveWritableObject(loi.copyObject(o));
-        } else {
-          result = loi.getPrimitiveJavaObject(o);
-        }
-        break;
+      if (objectInspectorOption == ObjectInspectorCopyOption.DEFAULT) {
+        objectInspectorOption = loi.preferWritable() ?
+            ObjectInspectorCopyOption.WRITABLE : ObjectInspectorCopyOption.JAVA;
       }
-      case JAVA: {
+      switch (objectInspectorOption) {
+      case JAVA:
         result = loi.getPrimitiveJavaObject(o);
+        if (loi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP) {
+          result = PrimitiveObjectInspectorFactory.javaTimestampObjectInspector.copyObject(result);
+        }
         break;
-      }
-      case WRITABLE: {
+      case WRITABLE:
         result = loi.getPrimitiveWritableObject(loi.copyObject(o));
         break;
       }
-      }
       break;
     }
     case LIST: {

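The copyToStandardObject change above copies JAVA timestamps instead of returning the primitive Java object directly; java.sql.Timestamp is mutable, so handing back the underlying object would let two consumers share state. A small illustration of that aliasing hazard, using only the JDK:

import java.sql.Timestamp;

public class MutableAliasExample {
  public static void main(String[] args) {
    Timestamp shared = new Timestamp(0L);
    Timestamp alias = shared;             // a "copy" that is really the same object
    alias.setTime(42000L);                // mutating the alias...
    System.out.println(shared.getTime()); // ...changes the original too: 42000
  }
}
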
Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java Mon Sep  8 04:38:17 2014
@@ -32,17 +32,6 @@ public class JavaBinaryObjectInspector e
   }
 
   @Override
-  public byte[] copyObject(Object o) {
-    if (null == o){
-      return null;
-    }
-    byte[] incoming = (byte[])o;
-    byte[] outgoing = new byte[incoming.length];
-    System.arraycopy(incoming, 0, outgoing, 0, incoming.length);
-    return outgoing;
-  }
-
-  @Override
   public BytesWritable getPrimitiveWritableObject(Object o) {
     return o == null ? null : new BytesWritable((byte[])o);
   }

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java Mon Sep  8 04:38:17 2014
@@ -19,11 +19,11 @@ package org.apache.hadoop.hive.serde2.ob
 
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 public class JavaHiveVarcharObjectInspector extends AbstractPrimitiveJavaObjectInspector
-implements SettableHiveVarcharObjectInspector {
+  implements SettableHiveVarcharObjectInspector {
 
   // no-arg ctor required for Kyro serialization
   public JavaHiveVarcharObjectInspector() {
@@ -38,9 +38,8 @@ implements SettableHiveVarcharObjectInsp
     if (o == null) {
       return null;
     }
-    HiveVarchar value = (HiveVarchar)o;
-    if (BaseCharUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeInfo)typeInfo)) {
+    HiveVarchar value = (HiveVarchar) o;
+    if (BaseCharUtils.doesPrimitiveMatchTypeParams(value, (VarcharTypeInfo) typeInfo)) {
       return value;
     }
     // value needs to be converted to match the type params (length, etc).
@@ -52,40 +51,27 @@ implements SettableHiveVarcharObjectInsp
     if (o == null) {
       return null;
     }
-    return getWritableWithParams((HiveVarchar)o);
-  }
-
-  private HiveVarchar getPrimitiveWithParams(HiveVarchar val) {
-    HiveVarchar hv = new HiveVarchar(val, getMaxLength());
-    return hv;
-  }
-
-  private HiveVarcharWritable getWritableWithParams(HiveVarchar val) {
-    HiveVarcharWritable newValue = new HiveVarcharWritable();
-    newValue.set(val, getMaxLength());
-    return newValue;
+    return getWritableWithParams((HiveVarchar) o);
   }
 
   @Override
   public Object set(Object o, HiveVarchar value) {
-    if (BaseCharUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeInfo)typeInfo)) {
-      return o = value;
+    if (BaseCharUtils.doesPrimitiveMatchTypeParams(value, (VarcharTypeInfo) typeInfo)) {
+      return value;
     } else {
       // Otherwise value may be too long, convert to appropriate value based on params
-      return o = new HiveVarchar(value,  getMaxLength());
+      return new HiveVarchar(value, getMaxLength());
     }
   }
 
   @Override
   public Object set(Object o, String value) {
-    return o = new HiveVarchar(value, getMaxLength());
+    return new HiveVarchar(value, getMaxLength());
   }
 
   @Override
   public Object create(HiveVarchar value) {
-    HiveVarchar hc = new HiveVarchar(value, getMaxLength());
-    return hc;
+    return new HiveVarchar(value, getMaxLength());
   }
 
   public int getMaxLength() {
@@ -93,4 +79,14 @@ implements SettableHiveVarcharObjectInsp
     return ti.getLength();
   }
 
+  private HiveVarchar getPrimitiveWithParams(HiveVarchar val) {
+    return new HiveVarchar(val, getMaxLength());
+  }
+
+  private HiveVarcharWritable getWritableWithParams(HiveVarchar val) {
+    HiveVarcharWritable newValue = new HiveVarcharWritable();
+    newValue.set(val, getMaxLength());
+    return newValue;
+  }
+
 }

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java Mon Sep  8 04:38:17 2014
@@ -39,6 +39,17 @@ public class JavaTimestampObjectInspecto
     return o == null ? null : (Timestamp) o;
   }
 
+  @Override
+  public Object copyObject(Object o) {
+    if (o == null) {
+      return null;
+    }
+    Timestamp source = (Timestamp) o;
+    Timestamp copy = new Timestamp(source.getTime());
+    copy.setNanos(source.getNanos());
+    return copy;
+  }
+
   public Timestamp get(Object o) {
     return (Timestamp) o;
   }

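The new copyObject above rebuilds the Timestamp from getTime() and then restores the nanos, because the Timestamp(long) constructor only carries millisecond precision. A quick standalone check of that behaviour; the class name is illustrative.

import java.sql.Timestamp;

public class TimestampCopyExample {
  public static void main(String[] args) {
    Timestamp source = new Timestamp(System.currentTimeMillis());
    source.setNanos(123456789);              // sub-millisecond precision lives here

    Timestamp copy = new Timestamp(source.getTime());
    copy.setNanos(source.getNanos());        // without this, nanos would be truncated to millis

    System.out.println(copy.equals(source)); // true
    System.out.println(copy == source);      // false: independent objects
  }
}
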
Modified: hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java (original)
+++ hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java Mon Sep  8 04:38:17 2014
@@ -388,7 +388,7 @@ public class TestLazyPrimitive extends T
     initLazyObject(ba, new byte[] {'2', '?', '3'}, 0, 3);
     assertEquals(new BytesWritable(new byte[] {'2', '?', '3'}), ba.getWritableObject());
     initLazyObject(ba, new byte[] {'\n'}, 0, 1);
-    assertEquals(new BytesWritable(new byte[] {}), ba.getWritableObject());
+    assertEquals(new BytesWritable(new byte[] {'\n'}), ba.getWritableObject());
   }
 
   public void testLazyTimestamp() throws Throwable {

Modified: hive/branches/spark/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Mon Sep  8 04:38:17 2014
@@ -30,6 +30,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
@@ -62,8 +63,6 @@ import org.apache.thrift.transport.TServ
 import org.apache.thrift.transport.TServerTransport;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import com.facebook.fb303.fb_status;
 
 /**
@@ -670,8 +669,11 @@ public class HiveServer extends ThriftHi
 
 
       boolean tcpKeepAlive = conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE);
+      int timeout = (int) HiveConf.getTimeVar(
+          conf, HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS);
 
-      TServerTransport serverTransport = tcpKeepAlive ? new TServerSocketKeepAlive(cli.port) : new TServerSocket(cli.port, 1000 * conf.getIntVar(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT));
+      TServerTransport serverTransport =
+          tcpKeepAlive ? new TServerSocketKeepAlive(cli.port) : new TServerSocket(cli.port, timeout);
 
       // set all properties specified on the command line
       for (Map.Entry<Object, Object> item : hiveconf.entrySet()) {

Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java Mon Sep  8 04:38:17 2014
@@ -20,12 +20,14 @@ package org.apache.hive.service.auth;
 
 import javax.security.sasl.AuthenticationException;
 
+/**
+ * This authentication provider allows any combination of username and password.
+ */
 public class AnonymousAuthenticationProviderImpl implements PasswdAuthenticationProvider {
 
   @Override
   public void Authenticate(String user, String password) throws AuthenticationException {
     // no-op authentication
-    return;
   }
 
 }

Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java Mon Sep  8 04:38:17 2014
@@ -19,15 +19,18 @@ package org.apache.hive.service.auth;
 
 import javax.security.sasl.AuthenticationException;
 
-public class AuthenticationProviderFactory {
+/**
+ * This class helps select a {@link PasswdAuthenticationProvider} for a given {@code AuthMethod}.
+ */
+public final class AuthenticationProviderFactory {
 
-  public static enum AuthMethods {
+  public enum AuthMethods {
     LDAP("LDAP"),
     PAM("PAM"),
     CUSTOM("CUSTOM"),
     NONE("NONE");
 
-    String authMethod;
+    private final String authMethod;
 
     AuthMethods(String authMethod) {
       this.authMethod = authMethod;
@@ -37,7 +40,8 @@ public class AuthenticationProviderFacto
       return authMethod;
     }
 
-    public static AuthMethods getValidAuthMethod(String authMethodStr) throws AuthenticationException {
+    public static AuthMethods getValidAuthMethod(String authMethodStr)
+      throws AuthenticationException {
       for (AuthMethods auth : AuthMethods.values()) {
         if (authMethodStr.equals(auth.getAuthMethod())) {
           return auth;
@@ -47,24 +51,20 @@ public class AuthenticationProviderFacto
     }
   }
 
-  private AuthenticationProviderFactory () {
+  private AuthenticationProviderFactory() {
   }
 
   public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
-      throws AuthenticationException {
-    if (authMethod.equals(AuthMethods.LDAP)) {
+    throws AuthenticationException {
+    if (authMethod == AuthMethods.LDAP) {
       return new LdapAuthenticationProviderImpl();
-    }
-    else if (authMethod.equals(AuthMethods.PAM)) {
+    } else if (authMethod == AuthMethods.PAM) {
       return new PamAuthenticationProviderImpl();
-    }
-    else if (authMethod.equals(AuthMethods.CUSTOM)) {
+    } else if (authMethod == AuthMethods.CUSTOM) {
       return new CustomAuthenticationProviderImpl();
-    }
-    else if (authMethod.equals(AuthMethods.NONE)) {
+    } else if (authMethod == AuthMethods.NONE) {
       return new AnonymousAuthenticationProviderImpl();
-    }
-    else {
+    } else {
       throw new AuthenticationException("Unsupported authentication method");
     }
   }

Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java Mon Sep  8 04:38:17 2014
@@ -22,27 +22,29 @@ import javax.security.sasl.Authenticatio
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.util.ReflectionUtils;
 
-public class CustomAuthenticationProviderImpl
-  implements PasswdAuthenticationProvider {
+/**
+ * This authentication provider implements the {@code CUSTOM} authentication. It allows a {@link
+ * PasswdAuthenticationProvider} to be specified at configuration time which may additionally
+ * implement {@link org.apache.hadoop.conf.Configurable Configurable} to grab Hive's {@link
+ * org.apache.hadoop.conf.Configuration Configuration}.
+ */
+public class CustomAuthenticationProviderImpl implements PasswdAuthenticationProvider {
 
-  Class<? extends PasswdAuthenticationProvider> customHandlerClass;
-  PasswdAuthenticationProvider customProvider;
+  private final PasswdAuthenticationProvider customProvider;
 
   @SuppressWarnings("unchecked")
-  CustomAuthenticationProviderImpl () {
+  CustomAuthenticationProviderImpl() {
     HiveConf conf = new HiveConf();
-    this.customHandlerClass = (Class<? extends PasswdAuthenticationProvider>)
-        conf.getClass(
-            HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
-            PasswdAuthenticationProvider.class);
-    this.customProvider =
-        ReflectionUtils.newInstance(this.customHandlerClass, conf);
+    Class<? extends PasswdAuthenticationProvider> customHandlerClass =
+      (Class<? extends PasswdAuthenticationProvider>) conf.getClass(
+        HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
+        PasswdAuthenticationProvider.class);
+    customProvider = ReflectionUtils.newInstance(customHandlerClass, conf);
   }
 
   @Override
-  public void Authenticate(String user, String  password)
-      throws AuthenticationException {
-    this.customProvider.Authenticate(user, password);
+  public void Authenticate(String user, String password) throws AuthenticationException {
+    customProvider.Authenticate(user, password);
   }
 
 }

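The refactored constructor above keeps the same behaviour: it reads a provider class from HiveConf (the HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS setting shown in the diff) and instantiates it reflectively via Hadoop's ReflectionUtils. A plain-JDK sketch of that reflective-provider pattern, with made-up interface and class names, for readers without the Hadoop classes to hand:

interface PasswordChecker {
  void authenticate(String user, String password) throws Exception;
}

class AllowAllChecker implements PasswordChecker {
  @Override
  public void authenticate(String user, String password) {
    // accept everyone, in the spirit of AnonymousAuthenticationProviderImpl
  }
}

public class CustomProviderExample {
  public static void main(String[] args) throws Exception {
    // In Hive the class name comes from configuration; it is hard-coded here.
    Class<? extends PasswordChecker> cls =
        Class.forName("AllowAllChecker").asSubclass(PasswordChecker.class);
    PasswordChecker checker = cls.getDeclaredConstructor().newInstance();
    checker.authenticate("alice", "secret");
    System.out.println("authenticated via " + cls.getName());
  }
}
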
Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java Mon Sep  8 04:38:17 2014
@@ -23,7 +23,6 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.HashMap;
 import java.util.Map;
-
 import javax.security.auth.login.LoginException;
 import javax.security.sasl.Sasl;
 
@@ -41,13 +40,14 @@ import org.apache.thrift.transport.TSock
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
+/**
+ * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
+ * given configuration as well as helps with authenticating requests.
+ */
 public class HiveAuthFactory {
-  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
 
-  public static enum AuthTypes {
+  public enum AuthTypes {
     NOSASL("NOSASL"),
     NONE("NONE"),
     LDAP("LDAP"),
@@ -55,7 +55,7 @@ public class HiveAuthFactory {
     CUSTOM("CUSTOM"),
     PAM("PAM");
 
-    private String authType;
+    private final String authType;
 
     AuthTypes(String authType) {
       this.authType = authType;
@@ -65,11 +65,11 @@ public class HiveAuthFactory {
       return authType;
     }
 
-  };
+  }
 
-  private HadoopThriftAuthBridge.Server saslServer = null;
+  private HadoopThriftAuthBridge.Server saslServer;
   private String authTypeStr;
-  private String transportMode;
+  private final String transportMode;
   private final HiveConf conf;
 
   public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
@@ -81,21 +81,19 @@ public class HiveAuthFactory {
     authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
 
     // In http mode we use NOSASL as the default auth type
-    if (transportMode.equalsIgnoreCase("http")) {
+    if ("http".equalsIgnoreCase(transportMode)) {
       if (authTypeStr == null) {
         authTypeStr = AuthTypes.NOSASL.getAuthName();
       }
-    }
-    else {
+    } else {
       if (authTypeStr == null) {
         authTypeStr = AuthTypes.NONE.getAuthName();
       }
       if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())
           && ShimLoader.getHadoopShims().isSecureShimImpl()) {
-        saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer(
-            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
-            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL)
-            );
+        saslServer = ShimLoader.getHadoopThriftAuthBridge()
+          .createServer(conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
+                        conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
         // start delegation token manager
         try {
           saslServer.startDelegationTokenSecretManager(conf, null);
@@ -108,8 +106,7 @@ public class HiveAuthFactory {
 
   public Map<String, String> getSaslProperties() {
     Map<String, String> saslProps = new HashMap<String, String>();
-    SaslQOP saslQOP =
-        SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
+    SaslQOP saslQOP = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
     saslProps.put(Sasl.QOP, saslQOP.toString());
     saslProps.put(Sasl.SERVER_AUTH, "true");
     return saslProps;
@@ -139,12 +136,10 @@ public class HiveAuthFactory {
     return transportFactory;
   }
 
-  public TProcessorFactory getAuthProcFactory(ThriftCLIService service)
-      throws LoginException {
-    if (transportMode.equalsIgnoreCase("http")) {
+  public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
+    if ("http".equalsIgnoreCase(transportMode)) {
       return HttpAuthUtils.getAuthProcFactory(service);
-    }
-    else {
+    } else {
       if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
         return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
       } else {
@@ -154,18 +149,14 @@ public class HiveAuthFactory {
   }
 
   public String getRemoteUser() {
-    if (saslServer != null) {
-      return saslServer.getRemoteUser();
-    } else {
-      return null;
-    }
+    return saslServer == null ? null : saslServer.getRemoteUser();
   }
 
   public String getIpAddress() {
-    if(saslServer != null && saslServer.getRemoteAddress() != null) {
-      return saslServer.getRemoteAddress().getHostAddress();
-    } else {
+    if (saslServer == null || saslServer.getRemoteAddress() == null) {
       return null;
+    } else {
+      return saslServer.getRemoteAddress().getHostAddress();
     }
   }
 
@@ -173,62 +164,58 @@ public class HiveAuthFactory {
   public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
     String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
     String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
-    if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
-      ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
+    if (principal.isEmpty() || keyTabFile.isEmpty()) {
+      throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
     } else {
-      throw new IOException ("HiveServer2 kerberos principal or keytab " +
-          "is not correctly configured");
+      ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
     }
   }
 
-  // Perform spnego login using the hadoop shim API if the configuration is available
-  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(
-      HiveConf hiveConf) throws IOException {
+  // Perform SPNEGO login using the hadoop shim API if the configuration is available
+  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
+    throws IOException {
     String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
     String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
-    if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
-      return ShimLoader.getHadoopShims().loginUserFromKeytabAndReturnUGI(
-          principal, keyTabFile);
+    if (principal.isEmpty() || keyTabFile.isEmpty()) {
+      throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
     } else {
-      throw new IOException ("HiveServer2 SPNego principal or keytab " +
-          "is not correctly configured");
+      return ShimLoader.getHadoopShims().loginUserFromKeytabAndReturnUGI(principal, keyTabFile);
     }
   }
 
-  public static TTransport getSocketTransport(String host, int port, int loginTimeout)
-      throws TTransportException {
+  public static TTransport getSocketTransport(String host, int port, int loginTimeout) {
     return new TSocket(host, port, loginTimeout);
   }
 
   public static TTransport getSSLSocket(String host, int port, int loginTimeout)
-      throws TTransportException {
+    throws TTransportException {
     return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
   }
 
   public static TTransport getSSLSocket(String host, int port, int loginTimeout,
-      String trustStorePath, String trustStorePassWord) throws TTransportException {
+    String trustStorePath, String trustStorePassWord) throws TTransportException {
     TSSLTransportFactory.TSSLTransportParameters params =
-        new TSSLTransportFactory.TSSLTransportParameters();
+      new TSSLTransportFactory.TSSLTransportParameters();
     params.setTrustStore(trustStorePath, trustStorePassWord);
     params.requireClientAuth(true);
     return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
   }
 
   public static TServerSocket getServerSocket(String hiveHost, int portNum)
-      throws TTransportException {
-    InetSocketAddress serverAddress = null;
-    if (hiveHost != null && !hiveHost.isEmpty()) {
-      serverAddress = new InetSocketAddress(hiveHost, portNum);
+    throws TTransportException {
+    InetSocketAddress serverAddress;
+    if (hiveHost == null || hiveHost.isEmpty()) {
+      serverAddress = new InetSocketAddress(portNum);
     } else {
-      serverAddress = new  InetSocketAddress(portNum);
+      serverAddress = new InetSocketAddress(hiveHost, portNum);
     }
-    return new TServerSocket(serverAddress );
+    return new TServerSocket(serverAddress);
   }
 
-  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum,
-      String keyStorePath, String keyStorePassWord) throws TTransportException, UnknownHostException {
+  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
+    String keyStorePassWord) throws TTransportException, UnknownHostException {
     TSSLTransportFactory.TSSLTransportParameters params =
-        new TSSLTransportFactory.TSSLTransportParameters();
+      new TSSLTransportFactory.TSSLTransportParameters();
     params.setKeyStore(keyStorePath, keyStorePassWord);
 
     InetAddress serverAddress;
@@ -243,8 +230,7 @@ public class HiveAuthFactory {
   // retrieve delegation token for the given user
   public String getDelegationToken(String owner, String renewer) throws HiveSQLException {
     if (saslServer == null) {
-      throw new HiveSQLException(
-          "Delegation token only supported over kerberos authentication");
+      throw new HiveSQLException("Delegation token only supported over kerberos authentication");
     }
 
     try {
@@ -263,8 +249,7 @@ public class HiveAuthFactory {
   // cancel given delegation token
   public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
     if (saslServer == null) {
-      throw new HiveSQLException(
-          "Delegation token only supported over kerberos authentication");
+      throw new HiveSQLException("Delegation token only supported over kerberos authentication");
     }
     try {
       saslServer.cancelDelegationToken(delegationToken);
@@ -275,8 +260,7 @@ public class HiveAuthFactory {
 
   public void renewDelegationToken(String delegationToken) throws HiveSQLException {
     if (saslServer == null) {
-      throw new HiveSQLException(
-          "Delegation token only supported over kerberos authentication");
+      throw new HiveSQLException("Delegation token only supported over kerberos authentication");
     }
     try {
       saslServer.renewDelegationToken(delegationToken);
@@ -287,21 +271,21 @@ public class HiveAuthFactory {
 
   public String getUserFromToken(String delegationToken) throws HiveSQLException {
     if (saslServer == null) {
-      throw new HiveSQLException(
-          "Delegation token only supported over kerberos authentication");
+      throw new HiveSQLException("Delegation token only supported over kerberos authentication");
     }
     try {
       return saslServer.getUserFromToken(delegationToken);
     } catch (IOException e) {
-      throw new HiveSQLException("Error extracting user from delegation token " + delegationToken, e);
+      throw new HiveSQLException("Error extracting user from delegation token " + delegationToken,
+                                 e);
     }
   }
 
   public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
-      HiveConf hiveConf) throws HiveSQLException {
-    UserGroupInformation sessionUgi;
+    HiveConf hiveConf) throws HiveSQLException {
 
     try {
+      UserGroupInformation sessionUgi;
       if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
         sessionUgi = ShimLoader.getHadoopShims().createProxyUser(realUser);
       } else {
@@ -309,11 +293,11 @@ public class HiveAuthFactory {
       }
       if (!proxyUser.equalsIgnoreCase(realUser)) {
         ShimLoader.getHadoopShims().
-        authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
+          authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
       }
     } catch (IOException e) {
-      throw new HiveSQLException("Failed to validate proxy privilage of " + realUser +
-          " for " + proxyUser, e);
+      throw new HiveSQLException(
+        "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, e);
     }
   }
 

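For reference, the client side of the SSL path refactored above reduces to the
TSSLTransportFactory calls shown in the hunk. A minimal standalone sketch of
opening a trust-store-backed Thrift socket the way getSSLSocket does (class and
method names are illustrative, not part of this patch; host, port and
trust-store values are caller-supplied placeholders):

    import org.apache.thrift.transport.TSSLTransportFactory;
    import org.apache.thrift.transport.TTransport;
    import org.apache.thrift.transport.TTransportException;

    // Illustrative only; mirrors HiveAuthFactory.getSSLSocket above.
    public class SslClientSocketSketch {
      public static TTransport open(String host, int port, int loginTimeout,
          String trustStorePath, String trustStorePassword) throws TTransportException {
        TSSLTransportFactory.TSSLTransportParameters params =
            new TSSLTransportFactory.TSSLTransportParameters();
        // The trust store must contain the certificate chain HiveServer2 presents.
        params.setTrustStore(trustStorePath, trustStorePassword);
        params.requireClientAuth(true);
        return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
      }
    }
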
Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java Mon Sep  8 04:38:17 2014
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-
 package org.apache.hive.service.auth;
 
 import java.io.IOException;
@@ -36,110 +35,95 @@ import org.apache.thrift.TProcessorFacto
 import org.apache.thrift.transport.TTransport;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
 
 /**
- *
- * Utility functions for http mode authentication
- *
+ * Utility functions for HTTP mode authentication.
  */
-public class HttpAuthUtils {
+public final class HttpAuthUtils {
 
   public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
   public static final String AUTHORIZATION = "Authorization";
   public static final String BASIC = "Basic";
   public static final String NEGOTIATE = "Negotiate";
 
-  public static class HttpCLIServiceProcessorFactory extends TProcessorFactory {
-    private final ThriftCLIService service;
-    private final HiveConf hiveConf;
-    private final boolean isDoAsEnabled;
-
-    public HttpCLIServiceProcessorFactory(ThriftCLIService service) {
-      super(null);
-      this.service = service;
-      this.hiveConf = service.getHiveConf();
-      this.isDoAsEnabled = hiveConf.getBoolVar(
-          HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
-    }
-
-    @Override
-    public TProcessor getProcessor(TTransport trans) {
-      TProcessor baseProcessor = new TCLIService.Processor<Iface>(service);
-      return isDoAsEnabled ? new HttpCLIServiceUGIProcessor(baseProcessor) :
-        baseProcessor;
-    }
-  }
-
   public static TProcessorFactory getAuthProcFactory(ThriftCLIService service) {
     return new HttpCLIServiceProcessorFactory(service);
   }
 
   /**
-   *
    * @return Stringified Base64 encoded kerberosAuthHeader on success
-   * @throws GSSException
-   * @throws IOException
-   * @throws InterruptedException
    */
-  public static String getKerberosServiceTicket(String principal,
-      String host, String serverHttpUrl)
-          throws GSSException, IOException, InterruptedException {
+  public static String getKerberosServiceTicket(String principal, String host, String serverHttpUrl)
+    throws IOException, InterruptedException {
     UserGroupInformation clientUGI = getClientUGI("kerberos");
     String serverPrincipal = getServerPrincipal(principal, host);
     // Uses the Ticket Granting Ticket in the UserGroupInformation
-    return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal,
-        clientUGI.getShortUserName(), serverHttpUrl));
+    return clientUGI.doAs(
+      new HttpKerberosClientAction(serverPrincipal, clientUGI.getShortUserName(), serverHttpUrl));
   }
 
   /**
-   * Get server pricipal and verify that hostname is present
-   * @return
-   * @throws IOException
+   * Get server principal and verify that hostname is present.
    */
-  private static String getServerPrincipal(String principal, String host)
-      throws IOException {
-    return ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(
-        principal, host);
+  private static String getServerPrincipal(String principal, String host) throws IOException {
+    return ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(principal, host);
   }
 
   /**
    * JAAS login to setup the client UserGroupInformation.
-   * Sets up the kerberos Ticket Granting Ticket,
-   * in the client UserGroupInformation object
+   * Sets up the Kerberos Ticket Granting Ticket,
+   * in the client UserGroupInformation object.
+   *
    * @return Client's UserGroupInformation
-   * @throws IOException
    */
-  public static UserGroupInformation getClientUGI(String authType)
-      throws IOException {
+  public static UserGroupInformation getClientUGI(String authType) throws IOException {
     return ShimLoader.getHadoopThriftAuthBridge().getCurrentUGIWithConf(authType);
   }
 
-  /**
-   *
-   * HttpKerberosClientAction
-   *
-   */
-  public static class HttpKerberosClientAction implements
-  PrivilegedExceptionAction<String> {
-    String serverPrincipal;
-    String clientUserName;
-    String serverHttpUrl;
-    private final Base64 base64codec;
+  private HttpAuthUtils() {
+    throw new UnsupportedOperationException("Can't initialize class");
+  }
+
+  public static class HttpCLIServiceProcessorFactory extends TProcessorFactory {
+
+    private final ThriftCLIService service;
+    private final HiveConf hiveConf;
+    private final boolean isDoAsEnabled;
+
+    public HttpCLIServiceProcessorFactory(ThriftCLIService service) {
+      super(null);
+      this.service = service;
+      hiveConf = service.getHiveConf();
+      isDoAsEnabled = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
+    }
+
+    @Override
+    public TProcessor getProcessor(TTransport trans) {
+      TProcessor baseProcessor = new TCLIService.Processor<Iface>(service);
+      return isDoAsEnabled ? new HttpCLIServiceUGIProcessor(baseProcessor) : baseProcessor;
+    }
+  }
+
+  public static class HttpKerberosClientAction implements PrivilegedExceptionAction<String> {
+
     public static final String HTTP_RESPONSE = "HTTP_RESPONSE";
     public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL";
+    private final String serverPrincipal;
+    private final String clientUserName;
+    private final String serverHttpUrl;
+    private final Base64 base64codec;
     private final HttpContext httpContext;
 
-    public HttpKerberosClientAction(String serverPrincipal,
-        String clientUserName, String serverHttpUrl) {
+    public HttpKerberosClientAction(String serverPrincipal, String clientUserName,
+      String serverHttpUrl) {
       this.serverPrincipal = serverPrincipal;
       this.clientUserName = clientUserName;
       this.serverHttpUrl = serverHttpUrl;
-      this.base64codec = new Base64(0);
-      this.httpContext = new BasicHttpContext();
+      base64codec = new Base64(0);
+      httpContext = new BasicHttpContext();
       httpContext.setAttribute(SERVER_HTTP_URL, serverHttpUrl);
     }
 
@@ -158,8 +142,8 @@ public class HttpAuthUtils {
       GSSName serverName = manager.createName(serverPrincipal, krb5PrincipalOid);
 
       // GSS credentials for client
-      GSSCredential clientCreds = manager.createCredential(clientName,
-          GSSCredential.DEFAULT_LIFETIME, mechOid,
+      GSSCredential clientCreds =
+        manager.createCredential(clientName, GSSCredential.DEFAULT_LIFETIME, mechOid,
           GSSCredential.INITIATE_ONLY);
 
       /*
@@ -170,22 +154,20 @@ public class HttpAuthUtils {
        *      use. The client chooses the mechanism to use.
        *    - clientCreds are the client credentials
        */
-      GSSContext gssContext = manager.createContext(serverName,
-          mechOid, clientCreds, GSSContext.DEFAULT_LIFETIME);
+      GSSContext gssContext =
+        manager.createContext(serverName, mechOid, clientCreds, GSSContext.DEFAULT_LIFETIME);
 
       // Mutual authentication not requested
       gssContext.requestMutualAuth(false);
 
-      // Estabilish context
+      // Establish context
       byte[] inToken = new byte[0];
-      byte[] outToken;
 
-      outToken = gssContext.initSecContext(inToken, 0, inToken.length);
+      byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
 
       gssContext.dispose();
       // Base64 encoded and stringified token for server
-      String authHeaderBase64String = new String(base64codec.encode(outToken));
-      return authHeaderBase64String;
+      return new String(base64codec.encode(outToken));
     }
   }
 }

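The reworked HttpKerberosClientAction.run() above is the heart of the Negotiate
handshake. A condensed sketch of that GSS-API sequence (illustrative class and
method names; it passes a null credential to createContext to pick up the
default credential from the current Subject instead of the explicit
createCredential call in the patch, and it assumes it runs under a doAs with a
valid Kerberos TGT):

    import org.apache.commons.codec.binary.Base64;
    import org.ietf.jgss.GSSContext;
    import org.ietf.jgss.GSSException;
    import org.ietf.jgss.GSSManager;
    import org.ietf.jgss.GSSName;
    import org.ietf.jgss.Oid;

    // Illustrative only; condensed from HttpKerberosClientAction.run().
    public class NegotiateTokenSketch {
      public static String buildToken(String serverPrincipal) throws GSSException {
        Oid mechOid = new Oid("1.2.840.113554.1.2.2");            // Kerberos v5 mechanism
        Oid krb5PrincipalOid = new Oid("1.2.840.113554.1.2.2.1"); // Kerberos principal name type

        GSSManager manager = GSSManager.getInstance();
        GSSName serverName = manager.createName(serverPrincipal, krb5PrincipalOid);

        // null credential = default credential of the current Subject; the patch
        // builds an explicit INITIATE_ONLY credential instead.
        GSSContext gssContext =
            manager.createContext(serverName, mechOid, null, GSSContext.DEFAULT_LIFETIME);
        gssContext.requestMutualAuth(false);

        // A single initSecContext round yields the token sent as
        // "Authorization: Negotiate <token>".
        byte[] inToken = new byte[0];
        byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
        gssContext.dispose();

        return new String(new Base64(0).encode(outToken));
      }
    }
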
Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java Mon Sep  8 04:38:17 2014
@@ -14,26 +14,27 @@
 
 package org.apache.hive.service.auth;
 
-public class HttpAuthenticationException extends Exception{
-  static final long serialVersionUID = 0;
+public class HttpAuthenticationException extends Exception {
+
+  private static final long serialVersionUID = 0;
 
   /**
-   * @param cause original exception.
+   * @param cause original exception
    */
   public HttpAuthenticationException(Throwable cause) {
     super(cause);
   }
 
   /**
-   * @param msg exception message.
+   * @param msg exception message
    */
   public HttpAuthenticationException(String msg) {
     super(msg);
   }
 
   /**
-   * @param msg exception message.
-   * @param cause original exception.
+   * @param msg   exception message
+   * @param cause original exception
    */
   public HttpAuthenticationException(String msg, Throwable cause) {
     super(msg, cause);

Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java Mon Sep  8 04:38:17 2014
@@ -31,14 +31,11 @@ import org.apache.thrift.TProcessor;
 import org.apache.thrift.protocol.TProtocol;
 
 /**
- *
- * Wraps the underlying thrift processor's process call,
+ * Wraps the underlying Thrift processor's process call,
  * to assume the client user's UGI/Subject for the doAs calls.
- * Gets the client's username from a threadlocal in SessionManager which is
+ * Gets the client's username from a ThreadLocal in SessionManager which is
  * set in the ThriftHttpServlet, and constructs a client UGI object from that.
- *
  */
-
 public class HttpCLIServiceUGIProcessor implements TProcessor {
 
   private final TProcessor underlyingProcessor;
@@ -46,18 +43,18 @@ public class HttpCLIServiceUGIProcessor 
 
   public HttpCLIServiceUGIProcessor(TProcessor underlyingProcessor) {
     this.underlyingProcessor = underlyingProcessor;
-    this.shim = ShimLoader.getHadoopShims();
+    shim = ShimLoader.getHadoopShims();
   }
 
   @Override
   public boolean process(final TProtocol in, final TProtocol out) throws TException {
-    /**
-     * Build the client UGI from threadlocal username [SessionManager.getUserName()].
-     * The threadlocal username is set in the ThriftHttpServlet.
+    /*
+     * Build the client UGI from ThreadLocal username [SessionManager.getUserName()].
+     * The ThreadLocal username is set in the ThriftHttpServlet.
      */
-    UserGroupInformation clientUgi = null;
     try {
-      clientUgi = shim.createRemoteUser(SessionManager.getUserName(), new ArrayList<String>());
+      UserGroupInformation clientUgi =
+        shim.createRemoteUser(SessionManager.getUserName(), new ArrayList<String>());
       return shim.doAs(clientUgi, new PrivilegedExceptionAction<Boolean>() {
         @Override
         public Boolean run() {
@@ -68,10 +65,9 @@ public class HttpCLIServiceUGIProcessor 
           }
         }
       });
-    }
-    catch (RuntimeException rte) {
+    } catch (RuntimeException rte) {
       if (rte.getCause() instanceof TException) {
-        throw (TException)rte.getCause();
+        throw (TException) rte.getCause();
       }
       throw rte;
     } catch (InterruptedException ie) {

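The pattern in process() above -- tunnel the checked TException out of the doAs
block as a RuntimeException and unwrap it afterwards -- is easier to follow
without the shim indirection. A minimal sketch, assuming direct use of Hadoop's
UserGroupInformation rather than the shim layer (class name, constructor and
the way the user name is passed in are illustrative):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.thrift.TException;
    import org.apache.thrift.TProcessor;
    import org.apache.thrift.protocol.TProtocol;

    // Illustrative wrapper; the real class resolves the UGI through the Hadoop shims.
    public class DoAsProcessorSketch implements TProcessor {
      private final TProcessor underlying;
      private final String clientUserName;

      public DoAsProcessorSketch(TProcessor underlying, String clientUserName) {
        this.underlying = underlying;
        this.clientUserName = clientUserName;
      }

      @Override
      public boolean process(final TProtocol in, final TProtocol out) throws TException {
        UserGroupInformation clientUgi = UserGroupInformation.createRemoteUser(clientUserName);
        try {
          return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
            @Override
            public Boolean run() {
              try {
                // Downstream calls now run as the HTTP client's user.
                return underlying.process(in, out);
              } catch (TException te) {
                // Tunnel the checked Thrift exception out of doAs.
                throw new RuntimeException(te);
              }
            }
          });
        } catch (RuntimeException rte) {
          if (rte.getCause() instanceof TException) {
            throw (TException) rte.getCause();
          }
          throw rte;
        } catch (IOException e) {
          throw new TException(e);
        } catch (InterruptedException e) {
          throw new TException(e);
        }
      }
    }
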
Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java Mon Sep  8 04:38:17 2014
@@ -19,7 +19,6 @@ package org.apache.hive.service.auth;
 
 import java.io.IOException;
 import java.util.Map;
-
 import javax.security.sasl.SaslException;
 
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -33,37 +32,20 @@ import org.apache.thrift.TProcessorFacto
 import org.apache.thrift.transport.TSaslClientTransport;
 import org.apache.thrift.transport.TTransport;
 
-public class KerberosSaslHelper {
-
-  private static class CLIServiceProcessorFactory extends TProcessorFactory {
-    private final ThriftCLIService service;
-    private final Server saslServer;
-
-    public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
-      super(null);
-      this.service = service;
-      this.saslServer = saslServer;
-    }
-
-    @Override
-    public TProcessor getProcessor(TTransport trans) {
-      TProcessor sqlProcessor = new TCLIService.Processor<Iface>(service);
-      return saslServer.wrapNonAssumingProcessor(sqlProcessor);
-    }
-  }
+public final class KerberosSaslHelper {
 
   public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
-      ThriftCLIService service) {
-    return new CLIServiceProcessorFactory (saslServer, service);
+    ThriftCLIService service) {
+    return new CLIServiceProcessorFactory(saslServer, service);
   }
 
   public static TTransport getKerberosTransport(String principal, String host,
-      final TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject) throws SaslException {
+    TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject)
+    throws SaslException {
     try {
-      final String names[] = principal.split("[/@]");
+      String[] names = principal.split("[/@]");
       if (names.length != 3) {
-        throw new IllegalArgumentException("Kerberos principal should have 3 parts: "
-            + principal);
+        throw new IllegalArgumentException("Kerberos principal should have 3 parts: " + principal);
       }
 
       if (assumeSubject) {
@@ -71,20 +53,21 @@ public class KerberosSaslHelper {
       } else {
         HadoopThriftAuthBridge.Client authBridge =
           ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
-        return authBridge.createClientTransport(principal, host,
-          "KERBEROS", null, underlyingTransport, saslProps);
+        return authBridge.createClientTransport(principal, host, "KERBEROS", null,
+                                                underlyingTransport, saslProps);
       }
     } catch (IOException e) {
       throw new SaslException("Failed to open client transport", e);
     }
   }
 
-  public static TTransport createSubjectAssumedTransport(String principal, 
-		  TTransport underlyingTransport, Map<String, String> saslProps) throws IOException {
-    TTransport saslTransport = null;
-    final String names[] = principal.split("[/@]");
+  public static TTransport createSubjectAssumedTransport(String principal,
+    TTransport underlyingTransport, Map<String, String> saslProps) throws IOException {
+    String[] names = principal.split("[/@]");
     try {
-      saslTransport = new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null, underlyingTransport);
+      TTransport saslTransport =
+        new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null,
+          underlyingTransport);
       return new TSubjectAssumingTransport(saslTransport);
     } catch (SaslException se) {
       throw new IOException("Could not instantiate SASL transport", se);
@@ -92,15 +75,37 @@ public class KerberosSaslHelper {
   }
 
   public static TTransport getTokenTransport(String tokenStr, String host,
-      final TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
+    TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
     HadoopThriftAuthBridge.Client authBridge =
       ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
 
     try {
-      return authBridge.createClientTransport(null, host,
-          "DIGEST", tokenStr, underlyingTransport, saslProps);
+      return authBridge.createClientTransport(null, host, "DIGEST", tokenStr, underlyingTransport,
+                                              saslProps);
     } catch (IOException e) {
       throw new SaslException("Failed to open client transport", e);
     }
   }
+
+  private KerberosSaslHelper() {
+    throw new UnsupportedOperationException("Can't initialize class");
+  }
+
+  private static class CLIServiceProcessorFactory extends TProcessorFactory {
+
+    private final ThriftCLIService service;
+    private final Server saslServer;
+
+    public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
+      super(null);
+      this.service = service;
+      this.saslServer = saslServer;
+    }
+
+    @Override
+    public TProcessor getProcessor(TTransport trans) {
+      TProcessor sqlProcessor = new TCLIService.Processor<Iface>(service);
+      return saslServer.wrapNonAssumingProcessor(sqlProcessor);
+    }
+  }
 }

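For context on the getKerberosTransport change: a stripped-down client sketch
that opens a GSSAPI SASL transport over a plain socket, mirroring the principal
parsing and the TSaslClientTransport constructor used above (class and method
names, host, port and the QOP value are placeholders; a Kerberos TGT from a
JAAS login or ticket cache is assumed, and the caller still has to open() the
returned transport):

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslException;

    import org.apache.thrift.transport.TSaslClientTransport;
    import org.apache.thrift.transport.TSocket;
    import org.apache.thrift.transport.TTransport;

    // Illustrative only; mirrors KerberosSaslHelper.getKerberosTransport above.
    public class KerberosTransportSketch {
      public static TTransport open(String principal, String host, int port) throws SaslException {
        // "hive/host@REALM" splits into {service, host, realm}; GSSAPI needs the first two.
        String[] names = principal.split("[/@]");
        if (names.length != 3) {
          throw new IllegalArgumentException("Kerberos principal should have 3 parts: " + principal);
        }

        Map<String, String> saslProps = new HashMap<String, String>();
        saslProps.put(Sasl.QOP, "auth");          // cf. HIVE_SERVER2_THRIFT_SASL_QOP
        saslProps.put(Sasl.SERVER_AUTH, "true");

        TTransport socket = new TSocket(host, port);
        return new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null, socket);
      }
    }
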
Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java Mon Sep  8 04:38:17 2014
@@ -18,10 +18,8 @@
 package org.apache.hive.service.auth;
 
 import java.util.Hashtable;
-
 import javax.naming.Context;
 import javax.naming.NamingException;
-import javax.naming.directory.DirContext;
 import javax.naming.directory.InitialDirContext;
 import javax.security.sasl.AuthenticationException;
 
@@ -33,16 +31,15 @@ public class LdapAuthenticationProviderI
   private final String baseDN;
   private final String ldapDomain;
 
-  LdapAuthenticationProviderImpl () {
+  LdapAuthenticationProviderImpl() {
     HiveConf conf = new HiveConf();
-    this.ldapURL = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
-    this.baseDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
-    this.ldapDomain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
+    ldapURL = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
+    baseDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
+    ldapDomain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
   }
 
   @Override
-  public void Authenticate(String user, String  password)
-      throws AuthenticationException {
+  public void Authenticate(String user, String password) throws AuthenticationException {
 
     Hashtable<String, Object> env = new Hashtable<String, Object>();
     env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
@@ -51,15 +48,15 @@ public class LdapAuthenticationProviderI
     //  If the domain is supplied, then append it. LDAP providers like Active Directory
     // use a fully qualified user name like foo@bar.com.
     if (ldapDomain != null) {
-      user  = user + "@" + ldapDomain;
+      user = user + "@" + ldapDomain;
     }
 
     // setup the security principal
     String bindDN;
-    if (baseDN != null) {
-      bindDN = "uid=" + user + "," + baseDN;
-    } else {
+    if (baseDN == null) {
       bindDN = user;
+    } else {
+      bindDN = "uid=" + user + "," + baseDN;
     }
     env.put(Context.SECURITY_AUTHENTICATION, "simple");
     env.put(Context.SECURITY_PRINCIPAL, bindDN);
@@ -67,12 +64,11 @@ public class LdapAuthenticationProviderI
 
     try {
       // Create initial context
-      DirContext ctx = new InitialDirContext(env);
+      Context ctx = new InitialDirContext(env);
       ctx.close();
     } catch (NamingException e) {
       throw new AuthenticationException("Error validating LDAP user", e);
     }
-  return;
   }
 
 }

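The Authenticate() method above is a plain JNDI simple bind: construct an
InitialDirContext with the user's DN and password and treat success as proof of
the credentials. A self-contained sketch of the same check (class and method
names, URL and base DN are placeholders; the SECURITY_CREDENTIALS line sits in
context not shown by the hunk):

    import java.util.Hashtable;
    import javax.naming.Context;
    import javax.naming.NamingException;
    import javax.naming.directory.InitialDirContext;
    import javax.security.sasl.AuthenticationException;

    // Illustrative only; mirrors LdapAuthenticationProviderImpl.Authenticate above.
    public class LdapBindSketch {
      public static void authenticate(String ldapUrl, String baseDN, String user, String password)
          throws AuthenticationException {
        // With a base DN the bind DN becomes "uid=<user>,<baseDN>"; otherwise the
        // user name is used as-is (e.g. an Active Directory UPN like foo@bar.com).
        String bindDN = (baseDN == null) ? user : "uid=" + user + "," + baseDN;

        Hashtable<String, Object> env = new Hashtable<String, Object>();
        env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
        env.put(Context.PROVIDER_URL, ldapUrl);
        env.put(Context.SECURITY_AUTHENTICATION, "simple");
        env.put(Context.SECURITY_PRINCIPAL, bindDN);
        env.put(Context.SECURITY_CREDENTIALS, password);

        try {
          // A successful bind is the whole check; nothing else is queried.
          Context ctx = new InitialDirContext(env);
          ctx.close();
        } catch (NamingException e) {
          throw new AuthenticationException("Error validating LDAP user", e);
        }
      }
    }
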
Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java Mon Sep  8 04:38:17 2014
@@ -20,33 +20,32 @@ package org.apache.hive.service.auth;
 import javax.security.sasl.AuthenticationException;
 
 import net.sf.jpam.Pam;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 
 public class PamAuthenticationProviderImpl implements PasswdAuthenticationProvider {
 
   private final String pamServiceNames;
 
-  PamAuthenticationProviderImpl () {
+  PamAuthenticationProviderImpl() {
     HiveConf conf = new HiveConf();
-    this.pamServiceNames = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES);
+    pamServiceNames = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES);
   }
 
   @Override
-  public void Authenticate(String user, String  password)
-      throws AuthenticationException {
+  public void Authenticate(String user, String password) throws AuthenticationException {
 
     if (pamServiceNames == null || pamServiceNames.trim().isEmpty()) {
       throw new AuthenticationException("No PAM services are set.");
     }
 
-    String pamServices[] = pamServiceNames.split(",");
+    String[] pamServices = pamServiceNames.split(",");
     for (String pamService : pamServices) {
       Pam pam = new Pam(pamService);
       boolean isAuthenticated = pam.authenticateSuccessful(user, password);
       if (!isAuthenticated) {
-        throw new AuthenticationException("Error authenticating with the PAM service: " + pamService);
+        throw new AuthenticationException(
+          "Error authenticating with the PAM service: " + pamService);
       }
     }
   }
-}
\ No newline at end of file
+}

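The PAM check above delegates entirely to JPam: every configured service must
accept the credentials. A minimal sketch of that loop with the service list,
user and password passed in as parameters (class and method names are
illustrative):

    import javax.security.sasl.AuthenticationException;

    import net.sf.jpam.Pam;

    // Illustrative only; mirrors PamAuthenticationProviderImpl.Authenticate above.
    public class PamCheckSketch {
      public static void authenticate(String pamServiceNames, String user, String password)
          throws AuthenticationException {
        if (pamServiceNames == null || pamServiceNames.trim().isEmpty()) {
          throw new AuthenticationException("No PAM services are set.");
        }
        // Every listed PAM service (e.g. "sshd,sudo") must authenticate the user.
        for (String pamService : pamServiceNames.split(",")) {
          Pam pam = new Pam(pamService);
          if (!pam.authenticateSuccessful(user, password)) {
            throw new AuthenticationException(
                "Error authenticating with the PAM service: " + pamService);
          }
        }
      }
    }
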
Modified: hive/branches/spark/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java?rev=1623263&r1=1623262&r2=1623263&view=diff
==============================================================================
--- hive/branches/spark/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java (original)
+++ hive/branches/spark/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java Mon Sep  8 04:38:17 2014
@@ -20,18 +20,20 @@ package org.apache.hive.service.auth;
 import javax.security.sasl.AuthenticationException;
 
 public interface PasswdAuthenticationProvider {
+
   /**
    * The Authenticate method is called by the HiveServer2 authentication layer
    * to authenticate users for their requests.
    * If a user is to be granted, return nothing/throw nothing.
    * When a user is to be disallowed, throw an appropriate {@link AuthenticationException}.
-   *
+   * <p/>
    * For an example implementation, see {@link LdapAuthenticationProviderImpl}.
    *
-   * @param user - The username received over the connection request
-   * @param password - The password received over the connection request
-   * @throws AuthenticationException - When a user is found to be
-   * invalid by the implementation
+   * @param user     The username received over the connection request
+   * @param password The password received over the connection request
+   *
+   * @throws AuthenticationException When a user is found to be
+   *                                 invalid by the implementation
    */
   void Authenticate(String user, String password) throws AuthenticationException;
 }
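
To make the contract concrete, a hypothetical implementation of the interface
(not part of this patch; the hard-coded credentials are purely illustrative):
return normally to grant access, throw AuthenticationException to deny it.

    import javax.security.sasl.AuthenticationException;

    import org.apache.hive.service.auth.PasswdAuthenticationProvider;

    // Hypothetical provider with hard-coded credentials, only to illustrate the contract.
    public class StaticPasswdAuthenticationProvider implements PasswdAuthenticationProvider {
      @Override
      public void Authenticate(String user, String password) throws AuthenticationException {
        // Returning normally grants access; throwing AuthenticationException denies it.
        if (!"hive".equals(user) || !"secret".equals(password)) {
          throw new AuthenticationException("Invalid credentials for user: " + user);
        }
      }
    }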