Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/02 21:57:07 UTC

svn commit: r1622108 [25/27] - in /hive/branches/tez: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ checkstyle/ common/src/java/...

Modified: hive/branches/tez/ql/src/test/results/clientpositive/view.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/view.q.out?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/view.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/view.q.out Tue Sep  2 19:56:56 2014
@@ -54,77 +54,101 @@ PREHOOK: query: -- relative reference, n
 CREATE VIEW v1 AS SELECT * FROM table1
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: db1@table1
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@v1
 POSTHOOK: query: -- relative reference, no alias
 CREATE VIEW v1 AS SELECT * FROM table1
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: db1@table1
+POSTHOOK: Output: database:db1
 POSTHOOK: Output: db1@v1
 PREHOOK: query: -- relative reference, aliased
 CREATE VIEW v2 AS SELECT t1.* FROM table1 t1
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: db1@table1
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@v2
 POSTHOOK: query: -- relative reference, aliased
 CREATE VIEW v2 AS SELECT t1.* FROM table1 t1
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: db1@table1
+POSTHOOK: Output: database:db1
 POSTHOOK: Output: db1@v2
 PREHOOK: query: -- relative reference, multiple tables
 CREATE VIEW v3 AS SELECT t1.*, t2.key k FROM table1 t1 JOIN table2 t2 ON t1.key = t2.key
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: db1@table1
 PREHOOK: Input: db1@table2
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@v3
 POSTHOOK: query: -- relative reference, multiple tables
 CREATE VIEW v3 AS SELECT t1.*, t2.key k FROM table1 t1 JOIN table2 t2 ON t1.key = t2.key
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: db1@table1
 POSTHOOK: Input: db1@table2
+POSTHOOK: Output: database:db1
 POSTHOOK: Output: db1@v3
 PREHOOK: query: -- absolute reference, no alias
 CREATE VIEW v4 AS SELECT * FROM db1.table1
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: db1@table1
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@v4
 POSTHOOK: query: -- absolute reference, no alias
 CREATE VIEW v4 AS SELECT * FROM db1.table1
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: db1@table1
+POSTHOOK: Output: database:db1
 POSTHOOK: Output: db1@v4
 PREHOOK: query: -- absolute reference, aliased
 CREATE VIEW v5 AS SELECT t1.* FROM db1.table1 t1
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: db1@table1
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@v5
 POSTHOOK: query: -- absolute reference, aliased
 CREATE VIEW v5 AS SELECT t1.* FROM db1.table1 t1
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: db1@table1
+POSTHOOK: Output: database:db1
 POSTHOOK: Output: db1@v5
 PREHOOK: query: -- absolute reference, multiple tables
 CREATE VIEW v6 AS SELECT t1.*, t2.key k FROM db1.table1 t1 JOIN db1.table2 t2 ON t1.key = t2.key
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: db1@table1
 PREHOOK: Input: db1@table2
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@v6
 POSTHOOK: query: -- absolute reference, multiple tables
 CREATE VIEW v6 AS SELECT t1.*, t2.key k FROM db1.table1 t1 JOIN db1.table2 t2 ON t1.key = t2.key
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: db1@table1
 POSTHOOK: Input: db1@table2
+POSTHOOK: Output: database:db1
 POSTHOOK: Output: db1@v6
 PREHOOK: query: -- relative reference, explicit column
 CREATE VIEW v7 AS SELECT key from table1
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: db1@table1
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@v7
 POSTHOOK: query: -- relative reference, explicit column
 CREATE VIEW v7 AS SELECT key from table1
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: db1@table1
+POSTHOOK: Output: database:db1
 POSTHOOK: Output: db1@v7
 PREHOOK: query: -- absolute reference, explicit column
 CREATE VIEW v8 AS SELECT key from db1.table1
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: db1@table1
+PREHOOK: Output: database:db1
+PREHOOK: Output: db1@v8
 POSTHOOK: query: -- absolute reference, explicit column
 CREATE VIEW v8 AS SELECT key from db1.table1
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: db1@table1
+POSTHOOK: Output: database:db1
 POSTHOOK: Output: db1@v8
 PREHOOK: query: CREATE DATABASE db2
 PREHOOK: type: CREATEDATABASE
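
The new "PREHOOK: Output: database:db1" lines (and their POSTHOOK counterparts) show that CREATE VIEW and CREATE TABLE AS SELECT now record the containing database as a write entity alongside the view or table itself; the same pattern repeats in the q.out diffs below. A minimal sketch of a post-execution hook that could observe these database-level outputs, assuming the hook interfaces on this branch (the hook class itself is hypothetical):

import java.util.Set;

import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

// Hypothetical post-execution hook: logs database-level write entities,
// i.e. the "database:db1" outputs recorded in the q.out files above.
public class DatabaseOutputLoggerHook implements ExecuteWithHookContext {
  @Override
  public void run(HookContext hookContext) throws Exception {
    Set<WriteEntity> outputs = hookContext.getOutputs();
    for (WriteEntity output : outputs) {
      if (output.getType() == Entity.Type.DATABASE) {
        // getDatabase() is assumed to return the metastore Database object
        System.out.println("writes to database: " + output.getDatabase().getName());
      }
    }
  }
}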

Modified: hive/branches/tez/ql/src/test/results/clientpositive/view_cast.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/view_cast.q.out?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/view_cast.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/view_cast.q.out Tue Sep  2 19:56:56 2014
@@ -56,9 +56,12 @@ POSTHOOK: type: DROPVIEW
 PREHOOK: query: CREATE VIEW aa_view_tw AS SELECT ks_uid, sr_id, act, at_ks_uid, at_sr_uid, from_unixtime(CAST(CAST( tstamp as BIGINT)/1000 AS BIGINT),'yyyyMMdd') AS act_date, from_unixtime(CAST(CAST( original_tstamp AS BIGINT)/1000 AS BIGINT),'yyyyMMdd') AS content_creation_date FROM atab WHERE dt='20130312' AND nt='tw' AND ks_uid != at_ks_uid
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@atab
+PREHOOK: Output: database:default
+PREHOOK: Output: default@aa_view_tw
 POSTHOOK: query: CREATE VIEW aa_view_tw AS SELECT ks_uid, sr_id, act, at_ks_uid, at_sr_uid, from_unixtime(CAST(CAST( tstamp as BIGINT)/1000 AS BIGINT),'yyyyMMdd') AS act_date, from_unixtime(CAST(CAST( original_tstamp AS BIGINT)/1000 AS BIGINT),'yyyyMMdd') AS content_creation_date FROM atab WHERE dt='20130312' AND nt='tw' AND ks_uid != at_ks_uid
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@atab
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@aa_view_tw
 PREHOOK: query: DROP VIEW IF EXISTS joined_aa_view_tw
 PREHOOK: type: DROPVIEW
@@ -69,11 +72,14 @@ PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@aa_view_tw
 PREHOOK: Input: default@atab
 PREHOOK: Input: default@mstab
+PREHOOK: Output: database:default
+PREHOOK: Output: default@joined_aa_view_tw
 POSTHOOK: query: CREATE VIEW joined_aa_view_tw AS SELECT aa.ks_uid, aa.sr_id, aa.act, at_sr_uid, aa.act_date, aa.at_ks_uid, aa.content_creation_date, coalesce( other.ksc, 10.0) AS at_ksc, coalesce( self.ksc , 10.0 ) AS self_ksc FROM aa_view_tw aa LEFT OUTER JOIN ( SELECT ks_uid, csc AS ksc FROM mstab WHERE dt='20130311' ) self ON ( CAST(aa.ks_uid AS BIGINT) = CAST(self.ks_uid AS BIGINT) ) LEFT OUTER JOIN ( SELECT ks_uid, csc AS ksc FROM mstab WHERE dt='20130311' ) other ON ( CAST(aa.at_ks_uid AS BIGINT) = CAST(other.ks_uid AS BIGINT) )
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@aa_view_tw
 POSTHOOK: Input: default@atab
 POSTHOOK: Input: default@mstab
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@joined_aa_view_tw
 PREHOOK: query: SELECT * FROM joined_aa_view_tw
 PREHOOK: type: QUERY

Modified: hive/branches/tez/ql/src/test/results/clientpositive/view_inputs.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/view_inputs.q.out?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/view_inputs.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/view_inputs.q.out Tue Sep  2 19:56:56 2014
@@ -3,20 +3,26 @@ PREHOOK: query: -- Tests that selecting 
 CREATE VIEW test_view1 AS SELECT * FROM src
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_view1
 POSTHOOK: query: -- Tests that selecting from a view and another view that selects from that same view
 
 CREATE VIEW test_view1 AS SELECT * FROM src
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@test_view1
 PREHOOK: query: CREATE VIEW test_view2 AS SELECT * FROM test_view1
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@src
 PREHOOK: Input: default@test_view1
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_view2
 POSTHOOK: query: CREATE VIEW test_view2 AS SELECT * FROM test_view1
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@test_view1
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@test_view2
 PREHOOK: query: SELECT COUNT(*) FROM test_view1 a JOIN test_view2 b ON a.key = b.key
 PREHOOK: type: QUERY

Modified: hive/branches/tez/ql/src/test/results/clientpositive/windowing.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/windowing.q.out?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/windowing.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/windowing.q.out Tue Sep  2 19:56:56 2014
@@ -1024,6 +1024,8 @@ from part 
 group by p_mfgr, p_brand
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@part
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mfgr_price_view
 POSTHOOK: query: -- 22. testViewAsTableInputWithWindowing
 create view IF NOT EXISTS mfgr_price_view as 
 select p_mfgr, p_brand, 
@@ -1032,6 +1034,7 @@ from part 
 group by p_mfgr, p_brand
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@part
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mfgr_price_view
 PREHOOK: query: select * 
 from (
@@ -1113,6 +1116,8 @@ from part 
 window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and current row)
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: default@part
+PREHOOK: Output: database:default
+PREHOOK: Output: default@mfgr_brand_price_view
 POSTHOOK: query: -- 23. testCreateViewWithWindowingQuery
 create view IF NOT EXISTS mfgr_brand_price_view as 
 select p_mfgr, p_brand, 
@@ -1121,6 +1126,7 @@ from part 
 window w1 as (distribute by p_mfgr sort by p_name rows between 2 preceding and current row)
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: default@part
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@mfgr_brand_price_view
 PREHOOK: query: select * from mfgr_brand_price_view
 PREHOOK: type: QUERY

Modified: hive/branches/tez/ql/src/test/results/clientpositive/windowing_navfn.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/windowing_navfn.q.out?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/windowing_navfn.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/windowing_navfn.q.out Tue Sep  2 19:56:56 2014
@@ -614,6 +614,8 @@ SELECT explode(
   ) s1 lateral view explode(barr) arr as b
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@wtest
 POSTHOOK: query: create table wtest as
 select a, b
 from
@@ -628,6 +630,7 @@ SELECT explode(
   ) s1 lateral view explode(barr) arr as b
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@wtest
 PREHOOK: query: select a, b,
 first_value(b) over (partition by a order by b rows between 1 preceding and 1 following ) ,

Modified: hive/branches/tez/ql/src/test/results/clientpositive/windowing_streaming.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/windowing_streaming.q.out?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/windowing_streaming.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/windowing_streaming.q.out Tue Sep  2 19:56:56 2014
@@ -347,10 +347,13 @@ PREHOOK: query: create table sB ROW FORM
 select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from  alltypesorc) a where r < 5
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sB
 POSTHOOK: query: create table sB ROW FORMAT DELIMITED FIELDS TERMINATED BY ','  STORED AS TEXTFILE as  
 select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from  alltypesorc) a where r < 5
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@sB
 PREHOOK: query: select * from sB
 where ctinyint is null
@@ -412,10 +415,13 @@ PREHOOK: query: create table sD ROW FORM
 select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from  alltypesorc) a where r < 5
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sD
 POSTHOOK: query: create table sD ROW FORMAT DELIMITED FIELDS TERMINATED BY ','  STORED AS TEXTFILE as  
 select * from (select ctinyint, cdouble, rank() over(partition by ctinyint order by cdouble) r from  alltypesorc) a where r < 5
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@sD
 PREHOOK: query: select * from sD
 where ctinyint is null

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java Tue Sep  2 19:56:56 2014
@@ -52,7 +52,7 @@ public interface Deserializer {
    * Deserialize an object out of a Writable blob. In most cases, the return
    * value of this function will be constant since the function will reuse the
    * returned object. If the client wants to keep a copy of the object, the
-   * client needs to clone the returnDeserializered value by calling
+   * client needs to clone the returned deserialized value by calling
    * ObjectInspectorUtils.getStandardObject().
    *
    * @param blob
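
The fixed sentence describes the reuse contract: deserialize() may overwrite its return value on the next call, so a caller that wants to keep a row must copy it first. A minimal sketch of that copy, assuming ObjectInspectorUtils.copyToStandardObject is the call the comment's getStandardObject() shorthand refers to:

import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.io.Writable;

public final class RowCopySketch {
  // deserialize() may reuse its return value, so copy the row before the
  // next call if it needs to outlive this iteration.
  static Object keepCopy(Deserializer deserializer, Writable blob) throws SerDeException {
    Object reused = deserializer.deserialize(blob);
    return ObjectInspectorUtils.copyToStandardObject(reused, deserializer.getObjectInspector());
  }
}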

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java Tue Sep  2 19:56:56 2014
@@ -69,9 +69,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.StringUtils;
@@ -108,7 +106,7 @@ public final class ObjectInspectorUtils 
       PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
       if (!(poi instanceof AbstractPrimitiveWritableObjectInspector)) {
         return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
-            (PrimitiveTypeInfo)poi.getTypeInfo());
+            poi.getTypeInfo());
       }
     }
     return oi;
@@ -292,24 +290,21 @@ public final class ObjectInspectorUtils 
     switch (oi.getCategory()) {
     case PRIMITIVE: {
       PrimitiveObjectInspector loi = (PrimitiveObjectInspector) oi;
-      switch (objectInspectorOption) {
-      case DEFAULT: {
-        if (loi.preferWritable()) {
-          result = loi.getPrimitiveWritableObject(loi.copyObject(o));
-        } else {
-          result = loi.getPrimitiveJavaObject(o);
-        }
-        break;
+      if (objectInspectorOption == ObjectInspectorCopyOption.DEFAULT) {
+        objectInspectorOption = loi.preferWritable() ?
+            ObjectInspectorCopyOption.WRITABLE : ObjectInspectorCopyOption.JAVA;
       }
-      case JAVA: {
+      switch (objectInspectorOption) {
+      case JAVA:
         result = loi.getPrimitiveJavaObject(o);
+        if (loi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP) {
+          result = PrimitiveObjectInspectorFactory.javaTimestampObjectInspector.copyObject(result);
+        }
         break;
-      }
-      case WRITABLE: {
+      case WRITABLE:
         result = loi.getPrimitiveWritableObject(loi.copyObject(o));
         break;
       }
-      }
       break;
     }
     case LIST: {
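
The rewritten PRIMITIVE branch first collapses the DEFAULT copy option into WRITABLE or JAVA via preferWritable(), then deep-copies timestamps in the JAVA path, since getPrimitiveJavaObject can hand back the inspector's own mutable Timestamp instance. The same logic restated as a standalone sketch (the wrapper class is illustrative; ObjectInspectorCopyOption is the nested enum of ObjectInspectorUtils):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public final class PrimitiveCopySketch {
  // Resolve DEFAULT into a concrete option first, then copy once.
  static Object copyPrimitive(PrimitiveObjectInspector poi, Object o,
      ObjectInspectorCopyOption option) {
    if (option == ObjectInspectorCopyOption.DEFAULT) {
      option = poi.preferWritable()
          ? ObjectInspectorCopyOption.WRITABLE : ObjectInspectorCopyOption.JAVA;
    }
    if (option == ObjectInspectorCopyOption.JAVA) {
      Object result = poi.getPrimitiveJavaObject(o);
      // Timestamps need an explicit deep copy; see the new copyObject()
      // override in JavaTimestampObjectInspector below.
      if (poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.TIMESTAMP) {
        result = PrimitiveObjectInspectorFactory.javaTimestampObjectInspector.copyObject(result);
      }
      return result;
    }
    return poi.getPrimitiveWritableObject(poi.copyObject(o));
  }
}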

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaBinaryObjectInspector.java Tue Sep  2 19:56:56 2014
@@ -32,17 +32,6 @@ public class JavaBinaryObjectInspector e
   }
 
   @Override
-  public byte[] copyObject(Object o) {
-    if (null == o){
-      return null;
-    }
-    byte[] incoming = (byte[])o;
-    byte[] outgoing = new byte[incoming.length];
-    System.arraycopy(incoming, 0, outgoing, 0, incoming.length);
-    return outgoing;
-  }
-
-  @Override
   public BytesWritable getPrimitiveWritableObject(Object o) {
     return o == null ? null : new BytesWritable((byte[])o);
   }

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveCharObjectInspector.java Tue Sep  2 19:56:56 2014
@@ -64,15 +64,16 @@ public class JavaHiveCharObjectInspector
   }
 
   public Object set(Object o, HiveChar value) {
-    HiveChar setValue = (HiveChar) o;
-    setValue.setValue(value, getMaxLength());
-    return setValue;
+    if (BaseCharUtils.doesPrimitiveMatchTypeParams(value,
+        (CharTypeInfo) typeInfo)) {
+      return value;
+    } else {
+      return new HiveChar(value, getMaxLength());
+    }
   }
 
   public Object set(Object o, String value) {
-    HiveChar setValue = (HiveChar) o;
-    setValue.setValue(value, getMaxLength());
-    return setValue;
+    return new HiveChar(value, getMaxLength());
   }
 
   public Object create(HiveChar value) {

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaHiveVarcharObjectInspector.java Tue Sep  2 19:56:56 2014
@@ -19,11 +19,11 @@ package org.apache.hadoop.hive.serde2.ob
 
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 
 public class JavaHiveVarcharObjectInspector extends AbstractPrimitiveJavaObjectInspector
-implements SettableHiveVarcharObjectInspector {
+  implements SettableHiveVarcharObjectInspector {
 
   // no-arg ctor required for Kyro serialization
   public JavaHiveVarcharObjectInspector() {
@@ -38,9 +38,8 @@ implements SettableHiveVarcharObjectInsp
     if (o == null) {
       return null;
     }
-    HiveVarchar value = (HiveVarchar)o;
-    if (BaseCharUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeInfo)typeInfo)) {
+    HiveVarchar value = (HiveVarchar) o;
+    if (BaseCharUtils.doesPrimitiveMatchTypeParams(value, (VarcharTypeInfo) typeInfo)) {
       return value;
     }
     // value needs to be converted to match the type params (length, etc).
@@ -52,40 +51,27 @@ implements SettableHiveVarcharObjectInsp
     if (o == null) {
       return null;
     }
-    return getWritableWithParams((HiveVarchar)o);
-  }
-
-  private HiveVarchar getPrimitiveWithParams(HiveVarchar val) {
-    HiveVarchar hv = new HiveVarchar(val, getMaxLength());
-    return hv;
-  }
-
-  private HiveVarcharWritable getWritableWithParams(HiveVarchar val) {
-    HiveVarcharWritable newValue = new HiveVarcharWritable();
-    newValue.set(val, getMaxLength());
-    return newValue;
+    return getWritableWithParams((HiveVarchar) o);
   }
 
   @Override
   public Object set(Object o, HiveVarchar value) {
-    if (BaseCharUtils.doesPrimitiveMatchTypeParams(
-        value, (VarcharTypeInfo)typeInfo)) {
-      return o = value;
+    if (BaseCharUtils.doesPrimitiveMatchTypeParams(value, (VarcharTypeInfo) typeInfo)) {
+      return value;
     } else {
       // Otherwise value may be too long, convert to appropriate value based on params
-      return o = new HiveVarchar(value,  getMaxLength());
+      return new HiveVarchar(value, getMaxLength());
     }
   }
 
   @Override
   public Object set(Object o, String value) {
-    return o = new HiveVarchar(value, getMaxLength());
+    return new HiveVarchar(value, getMaxLength());
   }
 
   @Override
   public Object create(HiveVarchar value) {
-    HiveVarchar hc = new HiveVarchar(value, getMaxLength());
-    return hc;
+    return new HiveVarchar(value, getMaxLength());
   }
 
   public int getMaxLength() {
@@ -93,4 +79,14 @@ implements SettableHiveVarcharObjectInsp
     return ti.getLength();
   }
 
+  private HiveVarchar getPrimitiveWithParams(HiveVarchar val) {
+    return new HiveVarchar(val, getMaxLength());
+  }
+
+  private HiveVarcharWritable getWritableWithParams(HiveVarchar val) {
+    HiveVarcharWritable newValue = new HiveVarcharWritable();
+    newValue.set(val, getMaxLength());
+    return newValue;
+  }
+
 }
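
As with the HiveChar inspector above, set() no longer mutates the object passed in: it returns the value itself when it already satisfies the type's length parameter, and otherwise allocates a new HiveVarchar truncated to getMaxLength(). A small sketch of the new semantics, assuming the factory lookup by type info and that a length of -1 leaves the input untruncated:

import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveVarcharObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;

public final class VarcharSetDemo {
  public static void main(String[] args) {
    // Inspector parameterized as varchar(5); the factory is assumed to
    // return the Java (non-writable) inspector for this type info.
    JavaHiveVarcharObjectInspector oi = (JavaHiveVarcharObjectInspector)
        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(new VarcharTypeInfo(5));
    HiveVarchar input = new HiveVarchar("abcdefgh", -1); // -1: no truncation yet
    // set() now returns a (possibly new) value instead of mutating "o",
    // so callers must use the returned object.
    Object stored = oi.set(null, input);
    System.out.println(stored); // "abcde", truncated to the declared length
  }
}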

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaTimestampObjectInspector.java Tue Sep  2 19:56:56 2014
@@ -39,6 +39,17 @@ public class JavaTimestampObjectInspecto
     return o == null ? null : (Timestamp) o;
   }
 
+  @Override
+  public Object copyObject(Object o) {
+    if (o == null) {
+      return null;
+    }
+    Timestamp source = (Timestamp) o;
+    Timestamp copy = new Timestamp(source.getTime());
+    copy.setNanos(source.getNanos());
+    return copy;
+  }
+
   public Timestamp get(Object o) {
     return (Timestamp) o;
   }
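
The new copyObject() override matters because java.sql.Timestamp is mutable; without a deep copy, two rows can alias one instance, and a later setTime()/setNanos() silently corrupts the earlier value. The same copy logic, shown standalone:

import java.sql.Timestamp;

public final class TimestampCopyDemo {
  public static void main(String[] args) {
    Timestamp original = new Timestamp(1409687816000L);
    original.setNanos(123456789);

    // Same deep-copy logic as the copyObject() override above.
    Timestamp copy = new Timestamp(original.getTime());
    copy.setNanos(original.getNanos());

    original.setTime(0L);     // mutate the source afterwards
    System.out.println(copy); // the copy is unaffected
  }
}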

Modified: hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java (original)
+++ hive/branches/tez/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java Tue Sep  2 19:56:56 2014
@@ -26,10 +26,11 @@ import org.apache.hadoop.hive.serde2.obj
  * Stores information about a type. Always use the TypeInfoFactory to create new
  * TypeInfo objects.
  *
- * We support 5 categories of types: 1. Primitive objects (String, Number, etc)
+ * We support 8 categories of types: 1. Primitive objects (String, Number, etc)
  * 2. List objects (a list of objects of a single type) 3. Map objects (a map
  * from objects of one type to objects of another type) 4. Struct objects (a
  * list of fields with names and their own types) 5. Union objects
+ * 6. Decimal objects 7. Char objects 8. Varchar objects
  */
 public abstract class TypeInfo implements Serializable {
 

Modified: hive/branches/tez/service/if/TCLIService.thrift
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/if/TCLIService.thrift?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/if/TCLIService.thrift (original)
+++ hive/branches/tez/service/if/TCLIService.thrift Tue Sep  2 19:56:56 2014
@@ -1054,6 +1054,9 @@ struct TFetchResultsReq {
   // Max number of rows that should be returned in
   // the rowset.
   3: required i64 maxRows
+
+  // The type of a fetch results request. 0 represents Query output. 1 represents Log
+  4: optional i16 fetchType = 0
 }
 
 struct TFetchResultsResp {
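
From a client's point of view, the new optional field selects what FetchResults returns for an operation handle: result rows (0, the default) or operation-log lines (1). The same field appears in the generated C++, Java, Python, and Ruby bindings below. A sketch against the generated Java bindings, assuming the usual TCLIService.Client setup (connection code elided):

import org.apache.thrift.TException;
import org.apache.hive.service.cli.thrift.TCLIService;
import org.apache.hive.service.cli.thrift.TFetchOrientation;
import org.apache.hive.service.cli.thrift.TFetchResultsReq;
import org.apache.hive.service.cli.thrift.TFetchResultsResp;
import org.apache.hive.service.cli.thrift.TOperationHandle;

public final class FetchLogsSketch {
  // Fetch up to 1000 operation-log lines instead of result rows.
  static TFetchResultsResp fetchLogs(TCLIService.Client client, TOperationHandle op)
      throws TException {
    TFetchResultsReq req = new TFetchResultsReq(op, TFetchOrientation.FETCH_NEXT, 1000);
    req.setFetchType((short) 1); // 0 = query output (the default), 1 = log
    return client.FetchResults(req);
  }
}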

Modified: hive/branches/tez/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp (original)
+++ hive/branches/tez/service/src/gen/thrift/gen-cpp/TCLIService_types.cpp Tue Sep  2 19:56:56 2014
@@ -6137,8 +6137,8 @@ void swap(TGetResultSetMetadataResp &a, 
   swap(a.__isset, b.__isset);
 }
 
-const char* TFetchResultsReq::ascii_fingerprint = "1B96A8C05BA9DD699FC8CD842240ABDE";
-const uint8_t TFetchResultsReq::binary_fingerprint[16] = {0x1B,0x96,0xA8,0xC0,0x5B,0xA9,0xDD,0x69,0x9F,0xC8,0xCD,0x84,0x22,0x40,0xAB,0xDE};
+const char* TFetchResultsReq::ascii_fingerprint = "B4CB1E4F8F8F4D50183DD372AD11753A";
+const uint8_t TFetchResultsReq::binary_fingerprint[16] = {0xB4,0xCB,0x1E,0x4F,0x8F,0x8F,0x4D,0x50,0x18,0x3D,0xD3,0x72,0xAD,0x11,0x75,0x3A};
 
 uint32_t TFetchResultsReq::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -6189,6 +6189,14 @@ uint32_t TFetchResultsReq::read(::apache
           xfer += iprot->skip(ftype);
         }
         break;
+      case 4:
+        if (ftype == ::apache::thrift::protocol::T_I16) {
+          xfer += iprot->readI16(this->fetchType);
+          this->__isset.fetchType = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -6223,6 +6231,11 @@ uint32_t TFetchResultsReq::write(::apach
   xfer += oprot->writeI64(this->maxRows);
   xfer += oprot->writeFieldEnd();
 
+  if (this->__isset.fetchType) {
+    xfer += oprot->writeFieldBegin("fetchType", ::apache::thrift::protocol::T_I16, 4);
+    xfer += oprot->writeI16(this->fetchType);
+    xfer += oprot->writeFieldEnd();
+  }
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;
@@ -6233,6 +6246,8 @@ void swap(TFetchResultsReq &a, TFetchRes
   swap(a.operationHandle, b.operationHandle);
   swap(a.orientation, b.orientation);
   swap(a.maxRows, b.maxRows);
+  swap(a.fetchType, b.fetchType);
+  swap(a.__isset, b.__isset);
 }
 
 const char* TFetchResultsResp::ascii_fingerprint = "FC43BC2D6F3B76D4DB0F34226A745C8E";

Modified: hive/branches/tez/service/src/gen/thrift/gen-cpp/TCLIService_types.h
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/gen/thrift/gen-cpp/TCLIService_types.h?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/gen/thrift/gen-cpp/TCLIService_types.h (original)
+++ hive/branches/tez/service/src/gen/thrift/gen-cpp/TCLIService_types.h Tue Sep  2 19:56:56 2014
@@ -3602,14 +3602,18 @@ class TGetResultSetMetadataResp {
 
 void swap(TGetResultSetMetadataResp &a, TGetResultSetMetadataResp &b);
 
+typedef struct _TFetchResultsReq__isset {
+  _TFetchResultsReq__isset() : fetchType(true) {}
+  bool fetchType;
+} _TFetchResultsReq__isset;
 
 class TFetchResultsReq {
  public:
 
-  static const char* ascii_fingerprint; // = "1B96A8C05BA9DD699FC8CD842240ABDE";
-  static const uint8_t binary_fingerprint[16]; // = {0x1B,0x96,0xA8,0xC0,0x5B,0xA9,0xDD,0x69,0x9F,0xC8,0xCD,0x84,0x22,0x40,0xAB,0xDE};
+  static const char* ascii_fingerprint; // = "B4CB1E4F8F8F4D50183DD372AD11753A";
+  static const uint8_t binary_fingerprint[16]; // = {0xB4,0xCB,0x1E,0x4F,0x8F,0x8F,0x4D,0x50,0x18,0x3D,0xD3,0x72,0xAD,0x11,0x75,0x3A};
 
-  TFetchResultsReq() : orientation((TFetchOrientation::type)0), maxRows(0) {
+  TFetchResultsReq() : orientation((TFetchOrientation::type)0), maxRows(0), fetchType(0) {
     orientation = (TFetchOrientation::type)0;
 
   }
@@ -3619,6 +3623,9 @@ class TFetchResultsReq {
   TOperationHandle operationHandle;
   TFetchOrientation::type orientation;
   int64_t maxRows;
+  int16_t fetchType;
+
+  _TFetchResultsReq__isset __isset;
 
   void __set_operationHandle(const TOperationHandle& val) {
     operationHandle = val;
@@ -3632,6 +3639,11 @@ class TFetchResultsReq {
     maxRows = val;
   }
 
+  void __set_fetchType(const int16_t val) {
+    fetchType = val;
+    __isset.fetchType = true;
+  }
+
   bool operator == (const TFetchResultsReq & rhs) const
   {
     if (!(operationHandle == rhs.operationHandle))
@@ -3640,6 +3652,10 @@ class TFetchResultsReq {
       return false;
     if (!(maxRows == rhs.maxRows))
       return false;
+    if (__isset.fetchType != rhs.__isset.fetchType)
+      return false;
+    else if (__isset.fetchType && !(fetchType == rhs.fetchType))
+      return false;
     return true;
   }
   bool operator != (const TFetchResultsReq &rhs) const {

Modified: hive/branches/tez/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java (original)
+++ hive/branches/tez/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TFetchResultsReq.java Tue Sep  2 19:56:56 2014
@@ -37,6 +37,7 @@ public class TFetchResultsReq implements
   private static final org.apache.thrift.protocol.TField OPERATION_HANDLE_FIELD_DESC = new org.apache.thrift.protocol.TField("operationHandle", org.apache.thrift.protocol.TType.STRUCT, (short)1);
   private static final org.apache.thrift.protocol.TField ORIENTATION_FIELD_DESC = new org.apache.thrift.protocol.TField("orientation", org.apache.thrift.protocol.TType.I32, (short)2);
   private static final org.apache.thrift.protocol.TField MAX_ROWS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxRows", org.apache.thrift.protocol.TType.I64, (short)3);
+  private static final org.apache.thrift.protocol.TField FETCH_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("fetchType", org.apache.thrift.protocol.TType.I16, (short)4);
 
   private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
   static {
@@ -47,6 +48,7 @@ public class TFetchResultsReq implements
   private TOperationHandle operationHandle; // required
   private TFetchOrientation orientation; // required
   private long maxRows; // required
+  private short fetchType; // optional
 
   /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
   public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -56,7 +58,8 @@ public class TFetchResultsReq implements
      * @see TFetchOrientation
      */
     ORIENTATION((short)2, "orientation"),
-    MAX_ROWS((short)3, "maxRows");
+    MAX_ROWS((short)3, "maxRows"),
+    FETCH_TYPE((short)4, "fetchType");
 
     private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
 
@@ -77,6 +80,8 @@ public class TFetchResultsReq implements
           return ORIENTATION;
         case 3: // MAX_ROWS
           return MAX_ROWS;
+        case 4: // FETCH_TYPE
+          return FETCH_TYPE;
         default:
           return null;
       }
@@ -118,7 +123,9 @@ public class TFetchResultsReq implements
 
   // isset id assignments
   private static final int __MAXROWS_ISSET_ID = 0;
+  private static final int __FETCHTYPE_ISSET_ID = 1;
   private byte __isset_bitfield = 0;
+  private _Fields optionals[] = {_Fields.FETCH_TYPE};
   public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
   static {
     Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -128,6 +135,8 @@ public class TFetchResultsReq implements
         new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TFetchOrientation.class)));
     tmpMap.put(_Fields.MAX_ROWS, new org.apache.thrift.meta_data.FieldMetaData("maxRows", org.apache.thrift.TFieldRequirementType.REQUIRED, 
         new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+    tmpMap.put(_Fields.FETCH_TYPE, new org.apache.thrift.meta_data.FieldMetaData("fetchType", org.apache.thrift.TFieldRequirementType.OPTIONAL, 
+        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16)));
     metaDataMap = Collections.unmodifiableMap(tmpMap);
     org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TFetchResultsReq.class, metaDataMap);
   }
@@ -135,6 +144,8 @@ public class TFetchResultsReq implements
   public TFetchResultsReq() {
     this.orientation = org.apache.hive.service.cli.thrift.TFetchOrientation.FETCH_NEXT;
 
+    this.fetchType = (short)0;
+
   }
 
   public TFetchResultsReq(
@@ -161,6 +172,7 @@ public class TFetchResultsReq implements
       this.orientation = other.orientation;
     }
     this.maxRows = other.maxRows;
+    this.fetchType = other.fetchType;
   }
 
   public TFetchResultsReq deepCopy() {
@@ -174,6 +186,8 @@ public class TFetchResultsReq implements
 
     setMaxRowsIsSet(false);
     this.maxRows = 0;
+    this.fetchType = (short)0;
+
   }
 
   public TOperationHandle getOperationHandle() {
@@ -252,6 +266,28 @@ public class TFetchResultsReq implements
     __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __MAXROWS_ISSET_ID, value);
   }
 
+  public short getFetchType() {
+    return this.fetchType;
+  }
+
+  public void setFetchType(short fetchType) {
+    this.fetchType = fetchType;
+    setFetchTypeIsSet(true);
+  }
+
+  public void unsetFetchType() {
+    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __FETCHTYPE_ISSET_ID);
+  }
+
+  /** Returns true if field fetchType is set (has been assigned a value) and false otherwise */
+  public boolean isSetFetchType() {
+    return EncodingUtils.testBit(__isset_bitfield, __FETCHTYPE_ISSET_ID);
+  }
+
+  public void setFetchTypeIsSet(boolean value) {
+    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __FETCHTYPE_ISSET_ID, value);
+  }
+
   public void setFieldValue(_Fields field, Object value) {
     switch (field) {
     case OPERATION_HANDLE:
@@ -278,6 +314,14 @@ public class TFetchResultsReq implements
       }
       break;
 
+    case FETCH_TYPE:
+      if (value == null) {
+        unsetFetchType();
+      } else {
+        setFetchType((Short)value);
+      }
+      break;
+
     }
   }
 
@@ -292,6 +336,9 @@ public class TFetchResultsReq implements
     case MAX_ROWS:
       return Long.valueOf(getMaxRows());
 
+    case FETCH_TYPE:
+      return Short.valueOf(getFetchType());
+
     }
     throw new IllegalStateException();
   }
@@ -309,6 +356,8 @@ public class TFetchResultsReq implements
       return isSetOrientation();
     case MAX_ROWS:
       return isSetMaxRows();
+    case FETCH_TYPE:
+      return isSetFetchType();
     }
     throw new IllegalStateException();
   }
@@ -353,6 +402,15 @@ public class TFetchResultsReq implements
         return false;
     }
 
+    boolean this_present_fetchType = true && this.isSetFetchType();
+    boolean that_present_fetchType = true && that.isSetFetchType();
+    if (this_present_fetchType || that_present_fetchType) {
+      if (!(this_present_fetchType && that_present_fetchType))
+        return false;
+      if (this.fetchType != that.fetchType)
+        return false;
+    }
+
     return true;
   }
 
@@ -375,6 +433,11 @@ public class TFetchResultsReq implements
     if (present_maxRows)
       builder.append(maxRows);
 
+    boolean present_fetchType = true && (isSetFetchType());
+    builder.append(present_fetchType);
+    if (present_fetchType)
+      builder.append(fetchType);
+
     return builder.toHashCode();
   }
 
@@ -416,6 +479,16 @@ public class TFetchResultsReq implements
         return lastComparison;
       }
     }
+    lastComparison = Boolean.valueOf(isSetFetchType()).compareTo(typedOther.isSetFetchType());
+    if (lastComparison != 0) {
+      return lastComparison;
+    }
+    if (isSetFetchType()) {
+      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fetchType, typedOther.fetchType);
+      if (lastComparison != 0) {
+        return lastComparison;
+      }
+    }
     return 0;
   }
 
@@ -455,6 +528,12 @@ public class TFetchResultsReq implements
     sb.append("maxRows:");
     sb.append(this.maxRows);
     first = false;
+    if (isSetFetchType()) {
+      if (!first) sb.append(", ");
+      sb.append("fetchType:");
+      sb.append(this.fetchType);
+      first = false;
+    }
     sb.append(")");
     return sb.toString();
   }
@@ -540,6 +619,14 @@ public class TFetchResultsReq implements
               org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
             }
             break;
+          case 4: // FETCH_TYPE
+            if (schemeField.type == org.apache.thrift.protocol.TType.I16) {
+              struct.fetchType = iprot.readI16();
+              struct.setFetchTypeIsSet(true);
+            } else { 
+              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+            }
+            break;
           default:
             org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
         }
@@ -566,6 +653,11 @@ public class TFetchResultsReq implements
       oprot.writeFieldBegin(MAX_ROWS_FIELD_DESC);
       oprot.writeI64(struct.maxRows);
       oprot.writeFieldEnd();
+      if (struct.isSetFetchType()) {
+        oprot.writeFieldBegin(FETCH_TYPE_FIELD_DESC);
+        oprot.writeI16(struct.fetchType);
+        oprot.writeFieldEnd();
+      }
       oprot.writeFieldStop();
       oprot.writeStructEnd();
     }
@@ -586,6 +678,14 @@ public class TFetchResultsReq implements
       struct.operationHandle.write(oprot);
       oprot.writeI32(struct.orientation.getValue());
       oprot.writeI64(struct.maxRows);
+      BitSet optionals = new BitSet();
+      if (struct.isSetFetchType()) {
+        optionals.set(0);
+      }
+      oprot.writeBitSet(optionals, 1);
+      if (struct.isSetFetchType()) {
+        oprot.writeI16(struct.fetchType);
+      }
     }
 
     @Override
@@ -598,6 +698,11 @@ public class TFetchResultsReq implements
       struct.setOrientationIsSet(true);
       struct.maxRows = iprot.readI64();
       struct.setMaxRowsIsSet(true);
+      BitSet incoming = iprot.readBitSet(1);
+      if (incoming.get(0)) {
+        struct.fetchType = iprot.readI16();
+        struct.setFetchTypeIsSet(true);
+      }
     }
   }
 

Modified: hive/branches/tez/service/src/gen/thrift/gen-py/TCLIService/ttypes.py
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/gen/thrift/gen-py/TCLIService/ttypes.py?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/gen/thrift/gen-py/TCLIService/ttypes.py (original)
+++ hive/branches/tez/service/src/gen/thrift/gen-py/TCLIService/ttypes.py Tue Sep  2 19:56:56 2014
@@ -5752,6 +5752,7 @@ class TFetchResultsReq:
    - operationHandle
    - orientation
    - maxRows
+   - fetchType
   """
 
   thrift_spec = (
@@ -5759,12 +5760,14 @@ class TFetchResultsReq:
     (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 1
     (2, TType.I32, 'orientation', None,     0, ), # 2
     (3, TType.I64, 'maxRows', None, None, ), # 3
+    (4, TType.I16, 'fetchType', None, 0, ), # 4
   )
 
-  def __init__(self, operationHandle=None, orientation=thrift_spec[2][4], maxRows=None,):
+  def __init__(self, operationHandle=None, orientation=thrift_spec[2][4], maxRows=None, fetchType=thrift_spec[4][4],):
     self.operationHandle = operationHandle
     self.orientation = orientation
     self.maxRows = maxRows
+    self.fetchType = fetchType
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -5791,6 +5794,11 @@ class TFetchResultsReq:
           self.maxRows = iprot.readI64();
         else:
           iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I16:
+          self.fetchType = iprot.readI16();
+        else:
+          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -5813,6 +5821,10 @@ class TFetchResultsReq:
       oprot.writeFieldBegin('maxRows', TType.I64, 3)
       oprot.writeI64(self.maxRows)
       oprot.writeFieldEnd()
+    if self.fetchType is not None:
+      oprot.writeFieldBegin('fetchType', TType.I16, 4)
+      oprot.writeI16(self.fetchType)
+      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 

Modified: hive/branches/tez/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb (original)
+++ hive/branches/tez/service/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb Tue Sep  2 19:56:56 2014
@@ -1598,11 +1598,13 @@ class TFetchResultsReq
   OPERATIONHANDLE = 1
   ORIENTATION = 2
   MAXROWS = 3
+  FETCHTYPE = 4
 
   FIELDS = {
     OPERATIONHANDLE => {:type => ::Thrift::Types::STRUCT, :name => 'operationHandle', :class => ::TOperationHandle},
     ORIENTATION => {:type => ::Thrift::Types::I32, :name => 'orientation', :default =>     0, :enum_class => ::TFetchOrientation},
-    MAXROWS => {:type => ::Thrift::Types::I64, :name => 'maxRows'}
+    MAXROWS => {:type => ::Thrift::Types::I64, :name => 'maxRows'},
+    FETCHTYPE => {:type => ::Thrift::Types::I16, :name => 'fetchType', :default => 0, :optional => true}
   }
 
   def struct_fields; FIELDS; end

Modified: hive/branches/tez/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Tue Sep  2 19:56:56 2014
@@ -30,6 +30,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.logging.Log;
@@ -62,8 +63,6 @@ import org.apache.thrift.transport.TServ
 import org.apache.thrift.transport.TServerTransport;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import com.facebook.fb303.fb_status;
 
 /**
@@ -670,8 +669,11 @@ public class HiveServer extends ThriftHi
 
 
       boolean tcpKeepAlive = conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE);
+      int timeout = (int) HiveConf.getTimeVar(
+          conf, HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS);
 
-      TServerTransport serverTransport = tcpKeepAlive ? new TServerSocketKeepAlive(cli.port) : new TServerSocket(cli.port, 1000 * conf.getIntVar(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT));
+      TServerTransport serverTransport =
+          tcpKeepAlive ? new TServerSocketKeepAlive(cli.port) : new TServerSocket(cli.port, timeout);
 
       // set all properties specified on the command line
       for (Map.Entry<Object, Object> item : hiveconf.entrySet()) {
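
The timeout is now read through HiveConf.getTimeVar, which parses values carrying an explicit time unit and converts to the requested unit, rather than the old convention of a bare integer of seconds multiplied by 1000. A small sketch of the difference, assuming the unit-suffix parsing on this branch (the property value is illustrative):

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hive.conf.HiveConf;

public final class SocketTimeoutSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // "20s" and "20000ms" are equivalent; the old code required a bare
    // integer of seconds and did the millisecond conversion itself.
    conf.set(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT.varname, "20s");
    int timeoutMs = (int) HiveConf.getTimeVar(
        conf, HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS);
    System.out.println(timeoutMs); // 20000
  }
}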

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/AnonymousAuthenticationProviderImpl.java Tue Sep  2 19:56:56 2014
@@ -20,12 +20,14 @@ package org.apache.hive.service.auth;
 
 import javax.security.sasl.AuthenticationException;
 
+/**
+ * This authentication provider allows any combination of username and password.
+ */
 public class AnonymousAuthenticationProviderImpl implements PasswdAuthenticationProvider {
 
   @Override
   public void Authenticate(String user, String password) throws AuthenticationException {
     // no-op authentication
-    return;
   }
 
 }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/AuthenticationProviderFactory.java Tue Sep  2 19:56:56 2014
@@ -19,15 +19,18 @@ package org.apache.hive.service.auth;
 
 import javax.security.sasl.AuthenticationException;
 
-public class AuthenticationProviderFactory {
+/**
+ * This class helps select a {@link PasswdAuthenticationProvider} for a given {@code AuthMethod}.
+ */
+public final class AuthenticationProviderFactory {
 
-  public static enum AuthMethods {
+  public enum AuthMethods {
     LDAP("LDAP"),
     PAM("PAM"),
     CUSTOM("CUSTOM"),
     NONE("NONE");
 
-    String authMethod;
+    private final String authMethod;
 
     AuthMethods(String authMethod) {
       this.authMethod = authMethod;
@@ -37,7 +40,8 @@ public class AuthenticationProviderFacto
       return authMethod;
     }
 
-    public static AuthMethods getValidAuthMethod(String authMethodStr) throws AuthenticationException {
+    public static AuthMethods getValidAuthMethod(String authMethodStr)
+      throws AuthenticationException {
       for (AuthMethods auth : AuthMethods.values()) {
         if (authMethodStr.equals(auth.getAuthMethod())) {
           return auth;
@@ -47,24 +51,20 @@ public class AuthenticationProviderFacto
     }
   }
 
-  private AuthenticationProviderFactory () {
+  private AuthenticationProviderFactory() {
   }
 
   public static PasswdAuthenticationProvider getAuthenticationProvider(AuthMethods authMethod)
-      throws AuthenticationException {
-    if (authMethod.equals(AuthMethods.LDAP)) {
+    throws AuthenticationException {
+    if (authMethod == AuthMethods.LDAP) {
       return new LdapAuthenticationProviderImpl();
-    }
-    else if (authMethod.equals(AuthMethods.PAM)) {
+    } else if (authMethod == AuthMethods.PAM) {
       return new PamAuthenticationProviderImpl();
-    }
-    else if (authMethod.equals(AuthMethods.CUSTOM)) {
+    } else if (authMethod == AuthMethods.CUSTOM) {
       return new CustomAuthenticationProviderImpl();
-    }
-    else if (authMethod.equals(AuthMethods.NONE)) {
+    } else if (authMethod == AuthMethods.NONE) {
       return new AnonymousAuthenticationProviderImpl();
-    }
-    else {
+    } else {
       throw new AuthenticationException("Unsupported authentication method");
     }
   }
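
Usage is unchanged by the cleanup: resolve the method name, obtain the provider, and call Authenticate, which returns normally on success and throws on failure. A minimal sketch:

import javax.security.sasl.AuthenticationException;
import org.apache.hive.service.auth.AuthenticationProviderFactory;
import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
import org.apache.hive.service.auth.PasswdAuthenticationProvider;

public final class AuthCheckDemo {
  static void check(String user, String password) throws AuthenticationException {
    // getValidAuthMethod throws for names other than LDAP/PAM/CUSTOM/NONE
    AuthMethods method = AuthMethods.getValidAuthMethod("NONE");
    PasswdAuthenticationProvider provider =
        AuthenticationProviderFactory.getAuthenticationProvider(method);
    provider.Authenticate(user, password); // no-op for the anonymous provider
  }
}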

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/CustomAuthenticationProviderImpl.java Tue Sep  2 19:56:56 2014
@@ -22,27 +22,29 @@ import javax.security.sasl.Authenticatio
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.util.ReflectionUtils;
 
-public class CustomAuthenticationProviderImpl
-  implements PasswdAuthenticationProvider {
+/**
+ * This authentication provider implements the {@code CUSTOM} authentication. It allows a {@link
+ * PasswdAuthenticationProvider} to be specified at configuration time which may additionally
+ * implement {@link org.apache.hadoop.conf.Configurable Configurable} to grab Hive's {@link
+ * org.apache.hadoop.conf.Configuration Configuration}.
+ */
+public class CustomAuthenticationProviderImpl implements PasswdAuthenticationProvider {
 
-  Class<? extends PasswdAuthenticationProvider> customHandlerClass;
-  PasswdAuthenticationProvider customProvider;
+  private final PasswdAuthenticationProvider customProvider;
 
   @SuppressWarnings("unchecked")
-  CustomAuthenticationProviderImpl () {
+  CustomAuthenticationProviderImpl() {
     HiveConf conf = new HiveConf();
-    this.customHandlerClass = (Class<? extends PasswdAuthenticationProvider>)
-        conf.getClass(
-            HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
-            PasswdAuthenticationProvider.class);
-    this.customProvider =
-        ReflectionUtils.newInstance(this.customHandlerClass, conf);
+    Class<? extends PasswdAuthenticationProvider> customHandlerClass =
+      (Class<? extends PasswdAuthenticationProvider>) conf.getClass(
+        HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
+        PasswdAuthenticationProvider.class);
+    customProvider = ReflectionUtils.newInstance(customHandlerClass, conf);
   }
 
   @Override
-  public void Authenticate(String user, String  password)
-      throws AuthenticationException {
-    this.customProvider.Authenticate(user, password);
+  public void Authenticate(String user, String password) throws AuthenticationException {
+    customProvider.Authenticate(user, password);
   }
 
 }
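
The new javadoc points out that a CUSTOM provider can also implement Configurable, in which case ReflectionUtils.newInstance hands it Hive's configuration via setConf. A hedged sketch of such a provider (the class and the property it checks are hypothetical):

import javax.security.sasl.AuthenticationException;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hive.service.auth.PasswdAuthenticationProvider;

// Hypothetical provider: ReflectionUtils.newInstance() calls setConf()
// because the class implements Configurable.
public class StaticPasswordAuthProvider implements PasswdAuthenticationProvider, Configurable {

  private Configuration conf;

  @Override
  public void setConf(Configuration conf) { this.conf = conf; }

  @Override
  public Configuration getConf() { return conf; }

  @Override
  public void Authenticate(String user, String password) throws AuthenticationException {
    // Illustrative check against a made-up property; a real provider would
    // consult LDAP, a database, etc.
    String expected = conf.get("example.static.password", "");
    if (!expected.equals(password)) {
      throw new AuthenticationException("Invalid credentials for " + user);
    }
  }
}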

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java Tue Sep  2 19:56:56 2014
@@ -23,7 +23,6 @@ import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.util.HashMap;
 import java.util.Map;
-
 import javax.security.auth.login.LoginException;
 import javax.security.sasl.Sasl;
 
@@ -41,13 +40,14 @@ import org.apache.thrift.transport.TSock
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 import org.apache.thrift.transport.TTransportFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
+/**
+ * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
+ * given configuration as well as helps with authenticating requests.
+ */
 public class HiveAuthFactory {
-  private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
 
-  public static enum AuthTypes {
+  public enum AuthTypes {
     NOSASL("NOSASL"),
     NONE("NONE"),
     LDAP("LDAP"),
@@ -55,7 +55,7 @@ public class HiveAuthFactory {
     CUSTOM("CUSTOM"),
     PAM("PAM");
 
-    private String authType;
+    private final String authType;
 
     AuthTypes(String authType) {
       this.authType = authType;
@@ -65,11 +65,11 @@ public class HiveAuthFactory {
       return authType;
     }
 
-  };
+  }
 
-  private HadoopThriftAuthBridge.Server saslServer = null;
+  private HadoopThriftAuthBridge.Server saslServer;
   private String authTypeStr;
-  private String transportMode;
+  private final String transportMode;
   private final HiveConf conf;
 
   public static final String HS2_PROXY_USER = "hive.server2.proxy.user";
@@ -81,21 +81,19 @@ public class HiveAuthFactory {
     authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION);
 
     // In http mode we use NOSASL as the default auth type
-    if (transportMode.equalsIgnoreCase("http")) {
+    if ("http".equalsIgnoreCase(transportMode)) {
       if (authTypeStr == null) {
         authTypeStr = AuthTypes.NOSASL.getAuthName();
       }
-    }
-    else {
+    } else {
       if (authTypeStr == null) {
         authTypeStr = AuthTypes.NONE.getAuthName();
       }
       if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())
           && ShimLoader.getHadoopShims().isSecureShimImpl()) {
-        saslServer = ShimLoader.getHadoopThriftAuthBridge().createServer(
-            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
-            conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL)
-            );
+        saslServer = ShimLoader.getHadoopThriftAuthBridge()
+          .createServer(conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB),
+                        conf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL));
         // start delegation token manager
         try {
           saslServer.startDelegationTokenSecretManager(conf, null);
@@ -108,8 +106,7 @@ public class HiveAuthFactory {
 
   public Map<String, String> getSaslProperties() {
     Map<String, String> saslProps = new HashMap<String, String>();
-    SaslQOP saslQOP =
-        SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
+    SaslQOP saslQOP = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));
     saslProps.put(Sasl.QOP, saslQOP.toString());
     saslProps.put(Sasl.SERVER_AUTH, "true");
     return saslProps;
@@ -139,12 +136,10 @@ public class HiveAuthFactory {
     return transportFactory;
   }
 
-  public TProcessorFactory getAuthProcFactory(ThriftCLIService service)
-      throws LoginException {
-    if (transportMode.equalsIgnoreCase("http")) {
+  public TProcessorFactory getAuthProcFactory(ThriftCLIService service) throws LoginException {
+    if ("http".equalsIgnoreCase(transportMode)) {
       return HttpAuthUtils.getAuthProcFactory(service);
-    }
-    else {
+    } else {
       if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
         return KerberosSaslHelper.getKerberosProcessorFactory(saslServer, service);
       } else {
@@ -154,18 +149,14 @@ public class HiveAuthFactory {
   }
 
   public String getRemoteUser() {
-    if (saslServer != null) {
-      return saslServer.getRemoteUser();
-    } else {
-      return null;
-    }
+    return saslServer == null ? null : saslServer.getRemoteUser();
   }
 
   public String getIpAddress() {
-    if(saslServer != null && saslServer.getRemoteAddress() != null) {
-      return saslServer.getRemoteAddress().getHostAddress();
-    } else {
+    if (saslServer == null || saslServer.getRemoteAddress() == null) {
       return null;
+    } else {
+      return saslServer.getRemoteAddress().getHostAddress();
     }
   }
 
@@ -173,62 +164,58 @@ public class HiveAuthFactory {
   public static void loginFromKeytab(HiveConf hiveConf) throws IOException {
     String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
     String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
-    if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
-      ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
+    if (principal.isEmpty() || keyTabFile.isEmpty()) {
+      throw new IOException("HiveServer2 Kerberos principal or keytab is not correctly configured");
     } else {
-      throw new IOException ("HiveServer2 kerberos principal or keytab " +
-          "is not correctly configured");
+      ShimLoader.getHadoopShims().loginUserFromKeytab(principal, keyTabFile);
     }
   }
 
-  // Perform spnego login using the hadoop shim API if the configuration is available
-  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(
-      HiveConf hiveConf) throws IOException {
+  // Perform SPNEGO login using the hadoop shim API if the configuration is available
+  public static UserGroupInformation loginFromSpnegoKeytabAndReturnUGI(HiveConf hiveConf)
+    throws IOException {
     String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
     String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
-    if (!principal.isEmpty() && !keyTabFile.isEmpty()) {
-      return ShimLoader.getHadoopShims().loginUserFromKeytabAndReturnUGI(
-          principal, keyTabFile);
+    if (principal.isEmpty() || keyTabFile.isEmpty()) {
+      throw new IOException("HiveServer2 SPNEGO principal or keytab is not correctly configured");
     } else {
-      throw new IOException ("HiveServer2 SPNego principal or keytab " +
-          "is not correctly configured");
+      return ShimLoader.getHadoopShims().loginUserFromKeytabAndReturnUGI(principal, keyTabFile);
     }
   }
 
-  public static TTransport getSocketTransport(String host, int port, int loginTimeout)
-      throws TTransportException {
+  public static TTransport getSocketTransport(String host, int port, int loginTimeout) {
     return new TSocket(host, port, loginTimeout);
   }
 
   public static TTransport getSSLSocket(String host, int port, int loginTimeout)
-      throws TTransportException {
+    throws TTransportException {
     return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
   }
 
   public static TTransport getSSLSocket(String host, int port, int loginTimeout,
-      String trustStorePath, String trustStorePassWord) throws TTransportException {
+    String trustStorePath, String trustStorePassWord) throws TTransportException {
     TSSLTransportFactory.TSSLTransportParameters params =
-        new TSSLTransportFactory.TSSLTransportParameters();
+      new TSSLTransportFactory.TSSLTransportParameters();
     params.setTrustStore(trustStorePath, trustStorePassWord);
     params.requireClientAuth(true);
     return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
   }
 
   public static TServerSocket getServerSocket(String hiveHost, int portNum)
-      throws TTransportException {
-    InetSocketAddress serverAddress = null;
-    if (hiveHost != null && !hiveHost.isEmpty()) {
-      serverAddress = new InetSocketAddress(hiveHost, portNum);
+    throws TTransportException {
+    InetSocketAddress serverAddress;
+    if (hiveHost == null || hiveHost.isEmpty()) {
+      serverAddress = new InetSocketAddress(portNum);
     } else {
-      serverAddress = new  InetSocketAddress(portNum);
+      serverAddress = new InetSocketAddress(hiveHost, portNum);
     }
-    return new TServerSocket(serverAddress );
+    return new TServerSocket(serverAddress);
   }
 
-  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum,
-      String keyStorePath, String keyStorePassWord) throws TTransportException, UnknownHostException {
+  public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, String keyStorePath,
+    String keyStorePassWord) throws TTransportException, UnknownHostException {
     TSSLTransportFactory.TSSLTransportParameters params =
-        new TSSLTransportFactory.TSSLTransportParameters();
+      new TSSLTransportFactory.TSSLTransportParameters();
     params.setKeyStore(keyStorePath, keyStorePassWord);
 
     InetAddress serverAddress;
@@ -243,8 +230,7 @@ public class HiveAuthFactory {
   // retrieve delegation token for the given user
   public String getDelegationToken(String owner, String renewer) throws HiveSQLException {
     if (saslServer == null) {
-      throw new HiveSQLException(
-          "Delegation token only supported over kerberos authentication");
+      throw new HiveSQLException("Delegation token only supported over kerberos authentication");
     }
 
     try {
@@ -263,8 +249,7 @@ public class HiveAuthFactory {
   // cancel given delegation token
   public void cancelDelegationToken(String delegationToken) throws HiveSQLException {
     if (saslServer == null) {
-      throw new HiveSQLException(
-          "Delegation token only supported over kerberos authentication");
+      throw new HiveSQLException("Delegation token only supported over kerberos authentication");
     }
     try {
       saslServer.cancelDelegationToken(delegationToken);
@@ -275,8 +260,7 @@ public class HiveAuthFactory {
 
   public void renewDelegationToken(String delegationToken) throws HiveSQLException {
     if (saslServer == null) {
-      throw new HiveSQLException(
-          "Delegation token only supported over kerberos authentication");
+      throw new HiveSQLException("Delegation token only supported over kerberos authentication");
     }
     try {
       saslServer.renewDelegationToken(delegationToken);
@@ -287,21 +271,21 @@ public class HiveAuthFactory {
 
   public String getUserFromToken(String delegationToken) throws HiveSQLException {
     if (saslServer == null) {
-      throw new HiveSQLException(
-          "Delegation token only supported over kerberos authentication");
+      throw new HiveSQLException("Delegation token only supported over kerberos authentication");
     }
     try {
       return saslServer.getUserFromToken(delegationToken);
     } catch (IOException e) {
-      throw new HiveSQLException("Error extracting user from delegation token " + delegationToken, e);
+      throw new HiveSQLException("Error extracting user from delegation token " + delegationToken,
+                                 e);
     }
   }
 
   public static void verifyProxyAccess(String realUser, String proxyUser, String ipAddress,
-      HiveConf hiveConf) throws HiveSQLException {
-    UserGroupInformation sessionUgi;
+    HiveConf hiveConf) throws HiveSQLException {
 
     try {
+      UserGroupInformation sessionUgi;
       if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
         sessionUgi = ShimLoader.getHadoopShims().createProxyUser(realUser);
       } else {
@@ -309,11 +293,11 @@ public class HiveAuthFactory {
       }
       if (!proxyUser.equalsIgnoreCase(realUser)) {
         ShimLoader.getHadoopShims().
-        authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
+          authorizeProxyAccess(proxyUser, sessionUgi, ipAddress, hiveConf);
       }
     } catch (IOException e) {
-      throw new HiveSQLException("Failed to validate proxy privilage of " + realUser +
-          " for " + proxyUser, e);
+      throw new HiveSQLException(
+        "Failed to validate proxy privilege of " + realUser + " for " + proxyUser, e);
     }
   }
 

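The SSL helpers in HiveAuthFactory are thin wrappers over Thrift's
TSSLTransportFactory. A hedged client-side sketch of the truststore variant shown
above (host, port, and store values are placeholders, not anything configured by
this commit):

    import org.apache.thrift.transport.TSSLTransportFactory;
    import org.apache.thrift.transport.TTransport;
    import org.apache.thrift.transport.TTransportException;

    public class SslClientSketch {
      public static void main(String[] args) throws TTransportException {
        // Mirrors HiveAuthFactory.getSSLSocket(host, port, timeout, path, password).
        TSSLTransportFactory.TSSLTransportParameters params =
            new TSSLTransportFactory.TSSLTransportParameters();
        params.setTrustStore("/path/to/truststore.jks", "changeit"); // placeholders
        params.requireClientAuth(true);

        // getClientSocket returns an already-open socket; close it when done.
        TTransport transport =
            TSSLTransportFactory.getClientSocket("hs2.example.com", 10000, 5000, params);
        transport.close();
      }
    }
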
Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpAuthUtils.java Tue Sep  2 19:56:56 2014
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-
 package org.apache.hive.service.auth;
 
 import java.io.IOException;
@@ -36,110 +35,95 @@ import org.apache.thrift.TProcessorFacto
 import org.apache.thrift.transport.TTransport;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSCredential;
-import org.ietf.jgss.GSSException;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.ietf.jgss.Oid;
 
 /**
- *
- * Utility functions for http mode authentication
- *
+ * Utility functions for HTTP mode authentication.
  */
-public class HttpAuthUtils {
+public final class HttpAuthUtils {
 
   public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
   public static final String AUTHORIZATION = "Authorization";
   public static final String BASIC = "Basic";
   public static final String NEGOTIATE = "Negotiate";
 
-  public static class HttpCLIServiceProcessorFactory extends TProcessorFactory {
-    private final ThriftCLIService service;
-    private final HiveConf hiveConf;
-    private final boolean isDoAsEnabled;
-
-    public HttpCLIServiceProcessorFactory(ThriftCLIService service) {
-      super(null);
-      this.service = service;
-      this.hiveConf = service.getHiveConf();
-      this.isDoAsEnabled = hiveConf.getBoolVar(
-          HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
-    }
-
-    @Override
-    public TProcessor getProcessor(TTransport trans) {
-      TProcessor baseProcessor = new TCLIService.Processor<Iface>(service);
-      return isDoAsEnabled ? new HttpCLIServiceUGIProcessor(baseProcessor) :
-        baseProcessor;
-    }
-  }
-
   public static TProcessorFactory getAuthProcFactory(ThriftCLIService service) {
     return new HttpCLIServiceProcessorFactory(service);
   }
 
   /**
-   *
    * @return Stringified Base64 encoded kerberosAuthHeader on success
-   * @throws GSSException
-   * @throws IOException
-   * @throws InterruptedException
    */
-  public static String getKerberosServiceTicket(String principal,
-      String host, String serverHttpUrl)
-          throws GSSException, IOException, InterruptedException {
+  public static String getKerberosServiceTicket(String principal, String host, String serverHttpUrl)
+    throws IOException, InterruptedException {
     UserGroupInformation clientUGI = getClientUGI("kerberos");
     String serverPrincipal = getServerPrincipal(principal, host);
     // Uses the Ticket Granting Ticket in the UserGroupInformation
-    return clientUGI.doAs(new HttpKerberosClientAction(serverPrincipal,
-        clientUGI.getShortUserName(), serverHttpUrl));
+    return clientUGI.doAs(
+      new HttpKerberosClientAction(serverPrincipal, clientUGI.getShortUserName(), serverHttpUrl));
   }
 
   /**
-   * Get server pricipal and verify that hostname is present
-   * @return
-   * @throws IOException
+   * Get server principal and verify that hostname is present.
    */
-  private static String getServerPrincipal(String principal, String host)
-      throws IOException {
-    return ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(
-        principal, host);
+  private static String getServerPrincipal(String principal, String host) throws IOException {
+    return ShimLoader.getHadoopThriftAuthBridge().getServerPrincipal(principal, host);
   }
 
   /**
    * JAAS login to setup the client UserGroupInformation.
-   * Sets up the kerberos Ticket Granting Ticket,
-   * in the client UserGroupInformation object
+   * Sets up the Kerberos Ticket Granting Ticket,
+   * in the client UserGroupInformation object.
+   *
    * @return Client's UserGroupInformation
-   * @throws IOException
    */
-  public static UserGroupInformation getClientUGI(String authType)
-      throws IOException {
+  public static UserGroupInformation getClientUGI(String authType) throws IOException {
     return ShimLoader.getHadoopThriftAuthBridge().getCurrentUGIWithConf(authType);
   }
 
-  /**
-   *
-   * HttpKerberosClientAction
-   *
-   */
-  public static class HttpKerberosClientAction implements
-  PrivilegedExceptionAction<String> {
-    String serverPrincipal;
-    String clientUserName;
-    String serverHttpUrl;
-    private final Base64 base64codec;
+  private HttpAuthUtils() {
+    throw new UnsupportedOperationException("Can't initialize class");
+  }
+
+  public static class HttpCLIServiceProcessorFactory extends TProcessorFactory {
+
+    private final ThriftCLIService service;
+    private final HiveConf hiveConf;
+    private final boolean isDoAsEnabled;
+
+    public HttpCLIServiceProcessorFactory(ThriftCLIService service) {
+      super(null);
+      this.service = service;
+      hiveConf = service.getHiveConf();
+      isDoAsEnabled = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
+    }
+
+    @Override
+    public TProcessor getProcessor(TTransport trans) {
+      TProcessor baseProcessor = new TCLIService.Processor<Iface>(service);
+      return isDoAsEnabled ? new HttpCLIServiceUGIProcessor(baseProcessor) : baseProcessor;
+    }
+  }
+
+  public static class HttpKerberosClientAction implements PrivilegedExceptionAction<String> {
+
     public static final String HTTP_RESPONSE = "HTTP_RESPONSE";
     public static final String SERVER_HTTP_URL = "SERVER_HTTP_URL";
+    private final String serverPrincipal;
+    private final String clientUserName;
+    private final String serverHttpUrl;
+    private final Base64 base64codec;
     private final HttpContext httpContext;
 
-    public HttpKerberosClientAction(String serverPrincipal,
-        String clientUserName, String serverHttpUrl) {
+    public HttpKerberosClientAction(String serverPrincipal, String clientUserName,
+      String serverHttpUrl) {
       this.serverPrincipal = serverPrincipal;
       this.clientUserName = clientUserName;
       this.serverHttpUrl = serverHttpUrl;
-      this.base64codec = new Base64(0);
-      this.httpContext = new BasicHttpContext();
+      base64codec = new Base64(0);
+      httpContext = new BasicHttpContext();
       httpContext.setAttribute(SERVER_HTTP_URL, serverHttpUrl);
     }
 
@@ -158,8 +142,8 @@ public class HttpAuthUtils {
       GSSName serverName = manager.createName(serverPrincipal, krb5PrincipalOid);
 
       // GSS credentials for client
-      GSSCredential clientCreds = manager.createCredential(clientName,
-          GSSCredential.DEFAULT_LIFETIME, mechOid,
+      GSSCredential clientCreds =
+        manager.createCredential(clientName, GSSCredential.DEFAULT_LIFETIME, mechOid,
           GSSCredential.INITIATE_ONLY);
 
       /*
@@ -170,22 +154,20 @@ public class HttpAuthUtils {
        *      use. The client chooses the mechanism to use.
        *    - clientCreds are the client credentials
        */
-      GSSContext gssContext = manager.createContext(serverName,
-          mechOid, clientCreds, GSSContext.DEFAULT_LIFETIME);
+      GSSContext gssContext =
+        manager.createContext(serverName, mechOid, clientCreds, GSSContext.DEFAULT_LIFETIME);
 
       // Mutual authentication not requested
       gssContext.requestMutualAuth(false);
 
-      // Estabilish context
+      // Establish context
       byte[] inToken = new byte[0];
-      byte[] outToken;
 
-      outToken = gssContext.initSecContext(inToken, 0, inToken.length);
+      byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length);
 
       gssContext.dispose();
       // Base64 encoded and stringified token for server
-      String authHeaderBase64String = new String(base64codec.encode(outToken));
-      return authHeaderBase64String;
+      return new String(base64codec.encode(outToken));
     }
   }
 }

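The HttpKerberosClientAction above reduces to a one-shot GSS-API context
initiation. A self-contained sketch of that flow, substituting the JDK's
java.util.Base64 (Java 8) for commons-codec; the OIDs are the standard Kerberos v5
mechanism and principal-name identifiers, and the call only succeeds when run
under a Kerberos login context:

    import java.util.Base64;
    import org.ietf.jgss.GSSContext;
    import org.ietf.jgss.GSSCredential;
    import org.ietf.jgss.GSSException;
    import org.ietf.jgss.GSSManager;
    import org.ietf.jgss.GSSName;
    import org.ietf.jgss.Oid;

    public class SpnegoTokenSketch {
      public static String serviceTicket(String serverPrincipal) throws GSSException {
        GSSManager manager = GSSManager.getInstance();
        // Standard OIDs: Kerberos v5 mechanism and krb5 principal name type.
        Oid mechOid = new Oid("1.2.840.113554.1.2.2");
        Oid krb5PrincipalOid = new Oid("1.2.840.113554.1.2.2.1");

        GSSName serverName = manager.createName(serverPrincipal, krb5PrincipalOid);
        // Default (current Subject's) credentials, initiate-only.
        GSSCredential clientCreds = manager.createCredential(null,
            GSSCredential.DEFAULT_LIFETIME, mechOid, GSSCredential.INITIATE_ONLY);

        GSSContext ctx = manager.createContext(serverName, mechOid, clientCreds,
            GSSContext.DEFAULT_LIFETIME);
        ctx.requestMutualAuth(false);

        // One round: feed an empty token in, get the service ticket token out.
        byte[] inToken = new byte[0];
        byte[] outToken = ctx.initSecContext(inToken, 0, inToken.length);
        ctx.dispose();
        return Base64.getEncoder().encodeToString(outToken);
      }
    }
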
Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpAuthenticationException.java Tue Sep  2 19:56:56 2014
@@ -14,26 +14,27 @@
 
 package org.apache.hive.service.auth;
 
-public class HttpAuthenticationException extends Exception{
-  static final long serialVersionUID = 0;
+public class HttpAuthenticationException extends Exception {
+
+  private static final long serialVersionUID = 0;
 
   /**
-   * @param cause original exception.
+   * @param cause original exception
    */
   public HttpAuthenticationException(Throwable cause) {
     super(cause);
   }
 
   /**
-   * @param msg exception message.
+   * @param msg exception message
    */
   public HttpAuthenticationException(String msg) {
     super(msg);
   }
 
   /**
-   * @param msg exception message.
-   * @param cause original exception.
+   * @param msg   exception message
+   * @param cause original exception
    */
   public HttpAuthenticationException(String msg, Throwable cause) {
     super(msg, cause);

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/HttpCLIServiceUGIProcessor.java Tue Sep  2 19:56:56 2014
@@ -31,14 +31,11 @@ import org.apache.thrift.TProcessor;
 import org.apache.thrift.protocol.TProtocol;
 
 /**
- *
- * Wraps the underlying thrift processor's process call,
+ * Wraps the underlying Thrift processor's process call,
  * to assume the client user's UGI/Subject for the doAs calls.
- * Gets the client's username from a threadlocal in SessionManager which is
+ * Gets the client's username from a ThreadLocal in SessionManager which is
  * set in the ThriftHttpServlet, and constructs a client UGI object from that.
- *
  */
-
 public class HttpCLIServiceUGIProcessor implements TProcessor {
 
   private final TProcessor underlyingProcessor;
@@ -46,18 +43,18 @@ public class HttpCLIServiceUGIProcessor 
 
   public HttpCLIServiceUGIProcessor(TProcessor underlyingProcessor) {
     this.underlyingProcessor = underlyingProcessor;
-    this.shim = ShimLoader.getHadoopShims();
+    shim = ShimLoader.getHadoopShims();
   }
 
   @Override
   public boolean process(final TProtocol in, final TProtocol out) throws TException {
-    /**
-     * Build the client UGI from threadlocal username [SessionManager.getUserName()].
-     * The threadlocal username is set in the ThriftHttpServlet.
+    /*
+     * Build the client UGI from ThreadLocal username [SessionManager.getUserName()].
+     * The ThreadLocal username is set in the ThriftHttpServlet.
      */
-    UserGroupInformation clientUgi = null;
     try {
-      clientUgi = shim.createRemoteUser(SessionManager.getUserName(), new ArrayList<String>());
+      UserGroupInformation clientUgi =
+        shim.createRemoteUser(SessionManager.getUserName(), new ArrayList<String>());
       return shim.doAs(clientUgi, new PrivilegedExceptionAction<Boolean>() {
         @Override
         public Boolean run() {
@@ -68,10 +65,9 @@ public class HttpCLIServiceUGIProcessor 
           }
         }
       });
-    }
-    catch (RuntimeException rte) {
+    } catch (RuntimeException rte) {
       if (rte.getCause() instanceof TException) {
-        throw (TException)rte.getCause();
+        throw (TException) rte.getCause();
       }
       throw rte;
     } catch (InterruptedException ie) {

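The error handling in this processor follows a common doAs idiom: checked
exceptions thrown inside the privileged action come back wrapped and must be
unwrapped at the call site. A self-contained sketch of the same shape using the
JDK's Subject.doAs, with WorkException standing in for Thrift's TException:

    import java.security.PrivilegedActionException;
    import java.security.PrivilegedExceptionAction;
    import javax.security.auth.Subject;

    public class DoAsUnwrapSketch {

      // Stand-in for a checked exception like org.apache.thrift.TException.
      static class WorkException extends Exception {
        WorkException(String msg) { super(msg); }
      }

      static boolean processAs(Subject subject) throws WorkException {
        try {
          return Subject.doAs(subject, new PrivilegedExceptionAction<Boolean>() {
            public Boolean run() throws WorkException {
              // Real code would invoke the wrapped processor here.
              throw new WorkException("simulated failure inside doAs");
            }
          });
        } catch (PrivilegedActionException pae) {
          // doAs wraps checked exceptions; recover the original type.
          if (pae.getCause() instanceof WorkException) {
            throw (WorkException) pae.getCause();
          }
          throw new RuntimeException(pae.getCause());
        }
      }

      public static void main(String[] args) {
        try {
          processAs(new Subject());
        } catch (WorkException we) {
          System.out.println("unwrapped: " + we.getMessage());
        }
      }
    }
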
Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/KerberosSaslHelper.java Tue Sep  2 19:56:56 2014
@@ -19,7 +19,6 @@ package org.apache.hive.service.auth;
 
 import java.io.IOException;
 import java.util.Map;
-
 import javax.security.sasl.SaslException;
 
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -33,37 +32,20 @@ import org.apache.thrift.TProcessorFacto
 import org.apache.thrift.transport.TSaslClientTransport;
 import org.apache.thrift.transport.TTransport;
 
-public class KerberosSaslHelper {
-
-  private static class CLIServiceProcessorFactory extends TProcessorFactory {
-    private final ThriftCLIService service;
-    private final Server saslServer;
-
-    public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
-      super(null);
-      this.service = service;
-      this.saslServer = saslServer;
-    }
-
-    @Override
-    public TProcessor getProcessor(TTransport trans) {
-      TProcessor sqlProcessor = new TCLIService.Processor<Iface>(service);
-      return saslServer.wrapNonAssumingProcessor(sqlProcessor);
-    }
-  }
+public final class KerberosSaslHelper {
 
   public static TProcessorFactory getKerberosProcessorFactory(Server saslServer,
-      ThriftCLIService service) {
-    return new CLIServiceProcessorFactory (saslServer, service);
+    ThriftCLIService service) {
+    return new CLIServiceProcessorFactory(saslServer, service);
   }
 
   public static TTransport getKerberosTransport(String principal, String host,
-      final TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject) throws SaslException {
+    TTransport underlyingTransport, Map<String, String> saslProps, boolean assumeSubject)
+    throws SaslException {
     try {
-      final String names[] = principal.split("[/@]");
+      String[] names = principal.split("[/@]");
       if (names.length != 3) {
-        throw new IllegalArgumentException("Kerberos principal should have 3 parts: "
-            + principal);
+        throw new IllegalArgumentException("Kerberos principal should have 3 parts: " + principal);
       }
 
       if (assumeSubject) {
@@ -71,20 +53,21 @@ public class KerberosSaslHelper {
       } else {
         HadoopThriftAuthBridge.Client authBridge =
           ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
-        return authBridge.createClientTransport(principal, host,
-          "KERBEROS", null, underlyingTransport, saslProps);
+        return authBridge.createClientTransport(principal, host, "KERBEROS", null,
+                                                underlyingTransport, saslProps);
       }
     } catch (IOException e) {
       throw new SaslException("Failed to open client transport", e);
     }
   }
 
-  public static TTransport createSubjectAssumedTransport(String principal, 
-		  TTransport underlyingTransport, Map<String, String> saslProps) throws IOException {
-    TTransport saslTransport = null;
-    final String names[] = principal.split("[/@]");
+  public static TTransport createSubjectAssumedTransport(String principal,
+    TTransport underlyingTransport, Map<String, String> saslProps) throws IOException {
+    String[] names = principal.split("[/@]");
     try {
-      saslTransport = new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null, underlyingTransport);
+      TTransport saslTransport =
+        new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null,
+          underlyingTransport);
       return new TSubjectAssumingTransport(saslTransport);
     } catch (SaslException se) {
       throw new IOException("Could not instantiate SASL transport", se);
@@ -92,15 +75,37 @@ public class KerberosSaslHelper {
   }
 
   public static TTransport getTokenTransport(String tokenStr, String host,
-      final TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
+    TTransport underlyingTransport, Map<String, String> saslProps) throws SaslException {
     HadoopThriftAuthBridge.Client authBridge =
       ShimLoader.getHadoopThriftAuthBridge().createClientWithConf("kerberos");
 
     try {
-      return authBridge.createClientTransport(null, host,
-          "DIGEST", tokenStr, underlyingTransport, saslProps);
+      return authBridge.createClientTransport(null, host, "DIGEST", tokenStr, underlyingTransport,
+                                              saslProps);
     } catch (IOException e) {
       throw new SaslException("Failed to open client transport", e);
     }
   }
+
+  private KerberosSaslHelper() {
+    throw new UnsupportedOperationException("Can't initialize class");
+  }
+
+  private static class CLIServiceProcessorFactory extends TProcessorFactory {
+
+    private final ThriftCLIService service;
+    private final Server saslServer;
+
+    public CLIServiceProcessorFactory(Server saslServer, ThriftCLIService service) {
+      super(null);
+      this.service = service;
+      this.saslServer = saslServer;
+    }
+
+    @Override
+    public TProcessor getProcessor(TTransport trans) {
+      TProcessor sqlProcessor = new TCLIService.Processor<Iface>(service);
+      return saslServer.wrapNonAssumingProcessor(sqlProcessor);
+    }
+  }
 }

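Both transport helpers above depend on the three-part service/host@REALM shape of
a Kerberos service principal. A quick illustration of the split used in this file
(the principal value is a placeholder):

    public class PrincipalSplitSketch {
      public static void main(String[] args) {
        // service/host@REALM, as expected by KerberosSaslHelper.
        String principal = "hive/hs2.example.com@EXAMPLE.COM"; // placeholder
        String[] names = principal.split("[/@]");
        if (names.length != 3) {
          throw new IllegalArgumentException(
              "Kerberos principal should have 3 parts: " + principal);
        }
        System.out.println(
            "service=" + names[0] + " host=" + names[1] + " realm=" + names[2]);
      }
    }
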
Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/LdapAuthenticationProviderImpl.java Tue Sep  2 19:56:56 2014
@@ -18,10 +18,8 @@
 package org.apache.hive.service.auth;
 
 import java.util.Hashtable;
-
 import javax.naming.Context;
 import javax.naming.NamingException;
-import javax.naming.directory.DirContext;
 import javax.naming.directory.InitialDirContext;
 import javax.security.sasl.AuthenticationException;
 
@@ -33,16 +31,15 @@ public class LdapAuthenticationProviderI
   private final String baseDN;
   private final String ldapDomain;
 
-  LdapAuthenticationProviderImpl () {
+  LdapAuthenticationProviderImpl() {
     HiveConf conf = new HiveConf();
-    this.ldapURL = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
-    this.baseDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
-    this.ldapDomain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
+    ldapURL = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL);
+    baseDN = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN);
+    ldapDomain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
   }
 
   @Override
-  public void Authenticate(String user, String  password)
-      throws AuthenticationException {
+  public void Authenticate(String user, String password) throws AuthenticationException {
 
     Hashtable<String, Object> env = new Hashtable<String, Object>();
     env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
@@ -51,15 +48,15 @@ public class LdapAuthenticationProviderI
     //  If the domain is supplied, then append it. LDAP providers like Active Directory
     // use a fully qualified user name like foo@bar.com.
     if (ldapDomain != null) {
-      user  = user + "@" + ldapDomain;
+      user = user + "@" + ldapDomain;
     }
 
     // setup the security principal
     String bindDN;
-    if (baseDN != null) {
-      bindDN = "uid=" + user + "," + baseDN;
-    } else {
+    if (baseDN == null) {
       bindDN = user;
+    } else {
+      bindDN = "uid=" + user + "," + baseDN;
     }
     env.put(Context.SECURITY_AUTHENTICATION, "simple");
     env.put(Context.SECURITY_PRINCIPAL, bindDN);
@@ -67,12 +64,11 @@ public class LdapAuthenticationProviderI
 
     try {
       // Create initial context
-      DirContext ctx = new InitialDirContext(env);
+      Context ctx = new InitialDirContext(env);
       ctx.close();
     } catch (NamingException e) {
       throw new AuthenticationException("Error validating LDAP user", e);
     }
-  return;
   }
 
 }

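The LDAP provider validates credentials with nothing more than a JNDI simple
bind: constructing an InitialDirContext either succeeds or throws. A standalone
sketch of that pattern (the URL and DN layout are placeholders, not Hive's
configuration):

    import java.util.Hashtable;
    import javax.naming.Context;
    import javax.naming.NamingException;
    import javax.naming.directory.InitialDirContext;

    public class LdapBindSketch {
      public static boolean bind(String user, String password) {
        Hashtable<String, Object> env = new Hashtable<String, Object>();
        env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
        env.put(Context.PROVIDER_URL, "ldap://ldap.example.com:389"); // placeholder
        env.put(Context.SECURITY_AUTHENTICATION, "simple");
        env.put(Context.SECURITY_PRINCIPAL,
            "uid=" + user + ",ou=people,dc=example,dc=com"); // placeholder DN
        env.put(Context.SECURITY_CREDENTIALS, password);
        try {
          // A successful construction is a successful bind; nothing else is queried.
          new InitialDirContext(env).close();
          return true;
        } catch (NamingException e) {
          return false;
        }
      }
    }
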
Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/PamAuthenticationProviderImpl.java Tue Sep  2 19:56:56 2014
@@ -20,33 +20,32 @@ package org.apache.hive.service.auth;
 import javax.security.sasl.AuthenticationException;
 
 import net.sf.jpam.Pam;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 
 public class PamAuthenticationProviderImpl implements PasswdAuthenticationProvider {
 
   private final String pamServiceNames;
 
-  PamAuthenticationProviderImpl () {
+  PamAuthenticationProviderImpl() {
     HiveConf conf = new HiveConf();
-    this.pamServiceNames = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES);
+    pamServiceNames = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PAM_SERVICES);
   }
 
   @Override
-  public void Authenticate(String user, String  password)
-      throws AuthenticationException {
+  public void Authenticate(String user, String password) throws AuthenticationException {
 
     if (pamServiceNames == null || pamServiceNames.trim().isEmpty()) {
       throw new AuthenticationException("No PAM services are set.");
     }
 
-    String pamServices[] = pamServiceNames.split(",");
+    String[] pamServices = pamServiceNames.split(",");
     for (String pamService : pamServices) {
       Pam pam = new Pam(pamService);
       boolean isAuthenticated = pam.authenticateSuccessful(user, password);
       if (!isAuthenticated) {
-        throw new AuthenticationException("Error authenticating with the PAM service: " + pamService);
+        throw new AuthenticationException(
+          "Error authenticating with the PAM service: " + pamService);
       }
     }
   }
-}
\ No newline at end of file
+}
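For completeness, the JPam call at the core of the PAM provider: every configured
service must authenticate the user, and the first failure aborts. A minimal
sketch (requires the JPam library and its native PAM bindings at runtime; the
comma-separated service list is a placeholder):

    import javax.security.sasl.AuthenticationException;
    import net.sf.jpam.Pam;

    public class PamCheckSketch {
      public static void check(String user, String password, String serviceNames)
          throws AuthenticationException {
        for (String service : serviceNames.split(",")) {
          // authenticateSuccessful returns false, rather than throwing,
          // on bad credentials.
          if (!new Pam(service).authenticateSuccessful(user, password)) {
            throw new AuthenticationException(
                "Error authenticating with the PAM service: " + service);
          }
        }
      }
    }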