Posted to commits@hive.apache.org by br...@apache.org on 2014/10/06 05:44:26 UTC

svn commit: r1629562 [2/38] - in /hive/branches/spark: ./ accumulo-handler/ beeline/ beeline/src/java/org/apache/hive/beeline/ bin/ext/ common/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/common/type/ contrib/src...

Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleGroupConcat.java Mon Oct  6 03:44:13 2014
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.contrib.u
 import java.util.ArrayList;
 import java.util.Collections;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 
@@ -36,8 +35,6 @@ import org.apache.hadoop.hive.ql.exec.UD
  * implement built-in aggregation functions, which are harder to program but
  * more efficient.
  */
-@Description(name = "example_group_concat",
-value = "_FUNC_(col) - Example UDAF that concatenates all arguments from different rows into a single string")
 public class UDAFExampleGroupConcat extends UDAF {
 
   /**

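This and the following contrib hunks drop the @Description annotation (and its import) from the example UDF/UDAF/UDTF classes. For orientation, the annotation sits directly on the function class and supplies the text that DESCRIBE FUNCTION prints, with _FUNC_ substituted by the registered function name. A minimal sketch of the pattern being removed, reassembled from the deleted lines above:

    import org.apache.hadoop.hive.ql.exec.Description;
    import org.apache.hadoop.hive.ql.exec.UDAF;

    // Pattern removed by this change: the annotation only feeds help text
    // (DESCRIBE FUNCTION output); runtime behavior is unaffected.
    @Description(name = "example_group_concat",
        value = "_FUNC_(col) - Example UDAF that concatenates all arguments"
            + " from different rows into a single string")
    public class UDAFExampleGroupConcat extends UDAF {
      // evaluator inner classes omitted; they are untouched by this hunk
    }
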
Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMaxN.java Mon Oct  6 03:44:13 2014
@@ -19,13 +19,11 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 
 /**
  * Returns the max N double values.
  */
-@Description(name = "example_max_n", value = "_FUNC_(expr) - Example UDAF that returns the max N double values")
 public class UDAFExampleMaxN extends UDAF {
 
   /**

Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMinN.java Mon Oct  6 03:44:13 2014
@@ -19,13 +19,11 @@
 
 package org.apache.hadoop.hive.contrib.udaf.example;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 
 /**
  * Returns the min N double values.
  */
-@Description(name = "example_min_n", value = "_FUNC_(expr) - Example UDAF that returns the min N double values")
 public class UDAFExampleMinN extends UDAF{
 
   /**

Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java Mon Oct  6 03:44:13 2014
@@ -17,14 +17,12 @@
  */
 package org.apache.hadoop.hive.contrib.udf.example;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleAdd.
  *
  */
-@Description(name = "example_add", value = "_FUNC_(expr) - Example UDAF that returns the sum")
 public class UDFExampleAdd extends UDF {
 
   public Integer evaluate(Integer... a) {

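The hunk above ends at the evaluate signature, so the method body is outside the diff. Purely as an illustrative sketch (not the actual contrib implementation, which this commit does not modify), a variadic evaluate of this shape typically just folds its arguments, skipping nulls:

    // Hypothetical body for illustration only; the real UDFExampleAdd.evaluate
    // is not shown in this hunk.
    public Integer evaluate(Integer... a) {
      int total = 0;
      for (Integer v : a) {
        if (v != null) {
          total += v;      // sum the non-null arguments
        }
      }
      return total;
    }
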
Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleArraySum.java Mon Oct  6 03:44:13 2014
@@ -19,14 +19,12 @@ package org.apache.hadoop.hive.contrib.u
 
 import java.util.List;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleArraySum.
  *
  */
-@Description(name = "example_arraysum", value = "_FUNC_(expr) - Example UDAF that returns the sum")
 public class UDFExampleArraySum extends UDF {
 
   public Double evaluate(List<Double> a) {

Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleFormat.java Mon Oct  6 03:44:13 2014
@@ -17,14 +17,12 @@
  */
 package org.apache.hadoop.hive.contrib.udf.example;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleFormat.
  *
  */
-@Description(name = "example_format", value = "_FUNC_(expr) - Example UDAF that returns formated String")
 public class UDFExampleFormat extends UDF {
 
   public String evaluate(String format, Object... args) {

Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleMapConcat.java Mon Oct  6 03:44:13 2014
@@ -21,15 +21,12 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Map;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleMapConcat.
  *
  */
-@Description(name = "example_mapconcat",
-value = "_FUNC_(expr) - Example UDAF that returns contents of Map as a formated String")
 public class UDFExampleMapConcat extends UDF {
 
   public String evaluate(Map<String, String> a) {

Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleStructPrint.java Mon Oct  6 03:44:13 2014
@@ -19,15 +19,12 @@ package org.apache.hadoop.hive.contrib.u
 
 import java.util.List;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * UDFExampleStructPrint.
  *
  */
-@Description(name = "example_structprint",
-value = "_FUNC_(obj) - Example UDAF that returns contents of an object")
 public class UDFExampleStructPrint extends UDF {
 
   public String evaluate(Object a) {

Modified: hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java (original)
+++ hive/branches/spark/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java Mon Oct  6 03:44:13 2014
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.contrib.u
 
 import java.util.ArrayList;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
@@ -35,8 +34,6 @@ import org.apache.hadoop.hive.serde2.obj
  * to test outputting of rows on close with lateral view.
  *
  */
-@Description(name = "udtfCount2",
-value = "_FUNC_(col) - UDF outputs the number of rows seen, twice.")
 public class GenericUDTFCount2 extends GenericUDTF {
 
   private transient Integer count = Integer.valueOf(0);

Modified: hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_add.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_add.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_add.q.out (original)
+++ hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_add.q.out Mon Oct  6 03:44:13 2014
@@ -25,24 +25,36 @@ SELECT example_add(1, 2),
 FROM src LIMIT 1
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: 3 (type: int), 6 (type: int), 10 (type: int), 3.3000000000000003 (type: double), 6.6 (type: double), 11.0 (type: double), 10.4 (type: double)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Statistics: Num rows: 500 Data size: 22000 Basic stats: COMPLETE Column stats: COMPLETE
+              Limit
+                Number of rows: 1
+                Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
   Stage: Stage-0
     Fetch Operator
       limit: 1
       Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: 3 (type: int), 6 (type: int), 10 (type: int), 3.3000000000000003 (type: double), 6.6 (type: double), 11.0 (type: double), 10.4 (type: double)
-            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-            Statistics: Num rows: 500 Data size: 22000 Basic stats: COMPLETE Column stats: COMPLETE
-            Limit
-              Number of rows: 1
-              Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: COMPLETE
-              ListSink
+        ListSink
 
 PREHOOK: query: SELECT example_add(1, 2),
        example_add(1, 2, 3),

Modified: hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_format.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_format.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_format.q.out (original)
+++ hive/branches/spark/contrib/src/test/results/clientpositive/udf_example_format.q.out Mon Oct  6 03:44:13 2014
@@ -19,24 +19,36 @@ SELECT example_format("abc"),
 FROM src LIMIT 1
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-0 is a root stage
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: 'abc' (type: string), '1.1' (type: string), '1.1 1.200000e+00' (type: string), 'a 12 10' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 500 Data size: 182500 Basic stats: COMPLETE Column stats: COMPLETE
+              Limit
+                Number of rows: 1
+                Statistics: Num rows: 1 Data size: 365 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 365 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
   Stage: Stage-0
     Fetch Operator
       limit: 1
       Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: 'abc' (type: string), '1.1' (type: string), '1.1 1.200000e+00' (type: string), 'a 12 10' (type: string)
-            outputColumnNames: _col0, _col1, _col2, _col3
-            Statistics: Num rows: 500 Data size: 182500 Basic stats: COMPLETE Column stats: COMPLETE
-            Limit
-              Number of rows: 1
-              Statistics: Num rows: 1 Data size: 365 Basic stats: COMPLETE Column stats: COMPLETE
-              ListSink
+        ListSink
 
 PREHOOK: query: SELECT example_format("abc"),
        example_format("%1$s", 1.1),

Modified: hive/branches/spark/data/files/parquet_types.txt
URL: http://svn.apache.org/viewvc/hive/branches/spark/data/files/parquet_types.txt?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/data/files/parquet_types.txt (original)
+++ hive/branches/spark/data/files/parquet_types.txt Mon Oct  6 03:44:13 2014
@@ -1,21 +1,21 @@
-100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111|a   |a  |k1:v1|101,200|10,abc
-101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222|ab  |ab |k2:v2|102,200|10,def
-102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333|abc|abc|k3:v3|103,200|10,ghi
-103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444|abcd|abcd|k4:v4|104,200|10,jkl
-104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555|abcde|abcde|k5:v5|105,200|10,mno
-105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666|abcdef|abcdef|k6:v6|106,200|10,pqr
-106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777|abcdefg|abcdefg|k7:v7|107,200|10,stu
-107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888|bcdefg|abcdefgh|k8:v8|108,200|10,vwx
-108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999|cdefg|abcdefghijklmnop|k9:v9|109,200|10,yza
-109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101|klmno|abcdedef|k10:v10|110,200|10,bcd
-110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111|pqrst|abcdede|k11:v11|111,200|10,efg
-111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121|nopqr|abcded|k12:v12|112,200|10,hij
-112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131|opqrs|abcdd|k13:v13|113,200|10,klm
-113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141|pqrst|abc|k14:v14|114,200|10,nop
-114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151|qrstu|b|k15:v15|115,200|10,qrs
-115|1|1|1.0|4.5|qrs|2026-04-04 16:16:16.161616161|rstuv|abcded|k16:v16|116,200|10,qrs
-116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171|stuvw|abcded|k17:v17|117,200|10,wxy
-117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181|tuvwx|abcded|k18:v18|118,200|10,zab
-118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191|uvwzy|abcdede|k19:v19|119,200|10,cde
-119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202|vwxyz|abcdede|k20:v20|120,200|10,fgh
-120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212|wxyza|abcde|k21:v21|121,200|10,ijk
+100|1|1|1.0|0.0|abc|2011-01-01 01:01:01.111111111|a   |a  
+101|2|2|1.1|0.3|def|2012-02-02 02:02:02.222222222|ab  |ab  
+102|3|3|1.2|0.6|ghi|2013-03-03 03:03:03.333333333|abc|abc
+103|1|4|1.3|0.9|jkl|2014-04-04 04:04:04.444444444|abcd|abcd
+104|2|5|1.4|1.2|mno|2015-05-05 05:05:05.555555555|abcde|abcde
+105|3|1|1.0|1.5|pqr|2016-06-06 06:06:06.666666666|abcdef|abcdef
+106|1|2|1.1|1.8|stu|2017-07-07 07:07:07.777777777|abcdefg|abcdefg
+107|2|3|1.2|2.1|vwx|2018-08-08 08:08:08.888888888|bcdefg|abcdefgh
+108|3|4|1.3|2.4|yza|2019-09-09 09:09:09.999999999|cdefg|abcdefghijklmnop
+109|1|5|1.4|2.7|bcd|2020-10-10 10:10:10.101010101|klmno|abcdedef
+110|2|1|1.0|3.0|efg|2021-11-11 11:11:11.111111111|pqrst|abcdede
+111|3|2|1.1|3.3|hij|2022-12-12 12:12:12.121212121|nopqr|abcded
+112|1|3|1.2|3.6|klm|2023-01-02 13:13:13.131313131|opqrs|abcdd
+113|2|4|1.3|3.9|nop|2024-02-02 14:14:14.141414141|pqrst|abc
+114|3|5|1.4|4.2|qrs|2025-03-03 15:15:15.151515151|qrstu|b
+115|1|1|1.0|4.5|tuv|2026-04-04 16:16:16.161616161|rstuv|abcded
+116|2|2|1.1|4.8|wxy|2027-05-05 17:17:17.171717171|stuvw|abcded
+117|3|3|1.2|5.1|zab|2028-06-06 18:18:18.181818181|tuvwx|abcded
+118|1|4|1.3|5.4|cde|2029-07-07 19:19:19.191919191|uvwzy|abcdede
+119|2|5|1.4|5.7|fgh|2030-08-08 20:20:20.202020202|vwxyz|abcdede
+120|3|1|1.0|6.0|ijk|2031-09-09 21:21:21.212121212|wxyza|abcde

Modified: hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java (original)
+++ hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/DefaultHBaseKeyFactory.java Mon Oct  6 03:44:13 2014
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.hive.hbase;
 
-import java.io.IOException;
-import java.util.Properties;
-
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
@@ -29,6 +26,9 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
+import java.io.IOException;
+import java.util.Properties;
+
 public class DefaultHBaseKeyFactory extends AbstractHBaseKeyFactory implements HBaseKeyFactory {
 
   protected LazySimpleSerDe.SerDeParameters serdeParams;

Modified: hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java (original)
+++ hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java Mon Oct  6 03:44:13 2014
@@ -53,7 +53,6 @@ public class HBaseSerDe extends Abstract
   public static final String HBASE_COMPOSITE_KEY_CLASS = "hbase.composite.key.class";
   public static final String HBASE_COMPOSITE_KEY_TYPES = "hbase.composite.key.types";
   public static final String HBASE_COMPOSITE_KEY_FACTORY = "hbase.composite.key.factory";
-  public static final String HBASE_STRUCT_SERIALIZER_CLASS = "hbase.struct.serialization.class";
   public static final String HBASE_SCAN_CACHE = "hbase.scan.cache";
   public static final String HBASE_SCAN_CACHEBLOCKS = "hbase.scan.cacheblock";
   public static final String HBASE_SCAN_BATCH = "hbase.scan.batch";
@@ -99,7 +98,7 @@ public class HBaseSerDe extends Abstract
 
     cachedHBaseRow = new LazyHBaseRow(
         (LazySimpleStructObjectInspector) cachedObjectInspector,
-            serdeParams.getKeyIndex(), serdeParams.getKeyFactory(), serdeParams.getValueFactories());
+        serdeParams.getKeyIndex(), serdeParams.getKeyFactory());
 
     serializer = new HBaseRowSerializer(serdeParams);
 

Modified: hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java (original)
+++ hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java Mon Oct  6 03:44:13 2014
@@ -41,10 +41,6 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.avro.AvroObjectInspectorGenerator;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
-import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
-import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.util.StringUtils;
 
@@ -375,19 +371,6 @@ public class HBaseSerDeHelper {
   }
 
   /**
-   * Create the {@link LazyObjectBase lazy field}
-   * */
-  public static LazyObjectBase createLazyField(ColumnMapping[] columnMappings, int fieldID,
-      ObjectInspector inspector) {
-    ColumnMapping colMap = columnMappings[fieldID];
-    if (colMap.getQualifierName() == null && !colMap.isHbaseRowKey()) {
-      // a column family
-      return new LazyHBaseCellMap((LazyMapObjectInspector) inspector);
-    }
-    return LazyFactory.createLazyObject(inspector, colMap.getBinaryStorage().get(0));
-  }
-
-  /**
    * Auto-generates the key struct for composite keys
    * 
    * @param compositeKeyParts map of composite key part name to its type. Usually this would be

Modified: hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java (original)
+++ hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java Mon Oct  6 03:44:13 2014
@@ -29,7 +29,6 @@ import org.apache.hadoop.hive.hbase.Colu
 import org.apache.hadoop.hive.hbase.struct.AvroHBaseValueFactory;
 import org.apache.hadoop.hive.hbase.struct.DefaultHBaseValueFactory;
 import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory;
-import org.apache.hadoop.hive.hbase.struct.StructHBaseValueFactory;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
@@ -205,21 +204,11 @@ public class HBaseSerDeParameters {
       for (int i = 0; i < columnMappings.size(); i++) {
         String serType = getSerializationType(conf, tbl, columnMappings.getColumnsMapping()[i]);
 
-        if (AVRO_SERIALIZATION_TYPE.equals(serType)) {
+        if (serType != null && serType.equals(AVRO_SERIALIZATION_TYPE)) {
           Schema schema = getSchema(conf, tbl, columnMappings.getColumnsMapping()[i]);
-          valueFactories.add(new AvroHBaseValueFactory(i, schema));
-        } else if (STRUCT_SERIALIZATION_TYPE.equals(serType)) {
-          String structValueClassName = tbl.getProperty(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS);
-
-          if (structValueClassName == null) {
-            throw new IllegalArgumentException(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS
-                + " must be set for hbase columns of type [" + STRUCT_SERIALIZATION_TYPE + "]");
-          }
-
-          Class<?> structValueClass = job.getClassByName(structValueClassName);
-          valueFactories.add(new StructHBaseValueFactory(i, structValueClass));
+          valueFactories.add(new AvroHBaseValueFactory(schema));
         } else {
-          valueFactories.add(new DefaultHBaseValueFactory(i));
+          valueFactories.add(new DefaultHBaseValueFactory());
         }
       }
     } catch (Exception e) {

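Net effect of this hunk: the struct-serialization branch is gone (along with the HBASE_STRUCT_SERIALIZER_CLASS property removed from HBaseSerDe above), the Avro check is made null-safe, and the value factories no longer carry a field index. Stitched together from the + lines, the factory-selection loop now reads roughly as follows (surrounding method, imports, and the catch block omitted):

    for (int i = 0; i < columnMappings.size(); i++) {
      String serType = getSerializationType(conf, tbl, columnMappings.getColumnsMapping()[i]);
      if (serType != null && serType.equals(AVRO_SERIALIZATION_TYPE)) {
        // Avro-typed column: the factory is built from the column's Avro schema
        Schema schema = getSchema(conf, tbl, columnMappings.getColumnsMapping()[i]);
        valueFactories.add(new AvroHBaseValueFactory(schema));
      } else {
        // everything else falls back to the default lazy value factory
        valueFactories.add(new DefaultHBaseValueFactory());
      }
    }
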
Modified: hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java (original)
+++ hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java Mon Oct  6 03:44:13 2014
@@ -20,15 +20,15 @@ package org.apache.hadoop.hive.hbase;
 
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.List;
 
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
-import org.apache.hadoop.hive.hbase.struct.HBaseValueFactory;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
 import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
+import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 
@@ -47,21 +47,18 @@ public class LazyHBaseRow extends LazySt
 
   private final int iKey;
   private final HBaseKeyFactory keyFactory;
-  private final List<HBaseValueFactory> valueFactories;
 
   public LazyHBaseRow(LazySimpleStructObjectInspector oi) {
-    this(oi, -1, null, null);
+    this(oi, -1, null);
   }
 
   /**
    * Construct a LazyHBaseRow object with the ObjectInspector.
    */
-  public LazyHBaseRow(LazySimpleStructObjectInspector oi, int iKey, HBaseKeyFactory keyFactory,
-      List<HBaseValueFactory> valueFactories) {
+  public LazyHBaseRow(LazySimpleStructObjectInspector oi, int iKey, HBaseKeyFactory keyFactory) {
     super(oi);
     this.iKey = iKey;
     this.keyFactory = keyFactory;
-    this.valueFactories = valueFactories;
   }
 
   /**
@@ -79,14 +76,13 @@ public class LazyHBaseRow extends LazySt
     if (fieldID == iKey) {
       return keyFactory.createKey(fieldRef.getFieldObjectInspector());
     }
-
-    if (valueFactories != null) {
-      return valueFactories.get(fieldID).createValueObject(fieldRef.getFieldObjectInspector());
+    ColumnMapping colMap = columnsMapping[fieldID];
+    if (colMap.qualifierName == null && !colMap.hbaseRowKey) {
+      // a column family
+      return new LazyHBaseCellMap((LazyMapObjectInspector) fieldRef.getFieldObjectInspector());
     }
-
-    // fallback to default
-    return HBaseSerDeHelper.createLazyField(columnsMapping, fieldID,
-        fieldRef.getFieldObjectInspector());
+    return LazyFactory.createLazyObject(fieldRef.getFieldObjectInspector(),
+        colMap.binaryStorage.get(0));
   }
 
   /**

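With the per-field value-factory hook gone (see the HBaseSerDeHelper and HBaseValueFactory hunks), LazyHBaseRow again decides inline whether a field is a whole column family or a single qualifier. Reassembled from the context and + lines above, the tail of the field-creation method now reads roughly as below (the enclosing method signature is outside this hunk; fieldRef is the StructField for fieldID, as in the unchanged context lines):

    if (fieldID == iKey) {
      return keyFactory.createKey(fieldRef.getFieldObjectInspector());
    }
    ColumnMapping colMap = columnsMapping[fieldID];
    if (colMap.qualifierName == null && !colMap.hbaseRowKey) {
      // a bare column family is exposed as a lazy cell map
      return new LazyHBaseCellMap((LazyMapObjectInspector) fieldRef.getFieldObjectInspector());
    }
    // a concrete qualifier becomes a plain lazy object, honoring its binary-storage flag
    return LazyFactory.createLazyObject(fieldRef.getFieldObjectInspector(),
        colMap.binaryStorage.get(0));
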
Modified: hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java (original)
+++ hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/AvroHBaseValueFactory.java Mon Oct  6 03:44:13 2014
@@ -48,8 +48,7 @@ public class AvroHBaseValueFactory exten
    * 
    * @param schema the associated {@link Schema schema}
    * */
-  public AvroHBaseValueFactory(int fieldID, Schema schema) {
-    super(fieldID);
+  public AvroHBaseValueFactory(Schema schema) {
     this.schema = schema;
   }
 

Modified: hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java (original)
+++ hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/DefaultHBaseValueFactory.java Mon Oct  6 03:44:13 2014
@@ -21,12 +21,9 @@ import java.io.IOException;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.hbase.ColumnMappings;
-import org.apache.hadoop.hive.hbase.HBaseSerDeHelper;
 import org.apache.hadoop.hive.hbase.HBaseSerDeParameters;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
-import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -38,23 +35,15 @@ import org.apache.hadoop.hive.serde2.typ
 public class DefaultHBaseValueFactory implements HBaseValueFactory{
 
   protected LazySimpleSerDe.SerDeParameters serdeParams;
-  protected ColumnMappings columnMappings;
   protected HBaseSerDeParameters hbaseParams;
   protected Properties properties;
   protected Configuration conf;
 
-  private int fieldID;
-
-  public DefaultHBaseValueFactory(int fieldID) {
-    this.fieldID = fieldID;
-  }
-
 	@Override
   public void init(HBaseSerDeParameters hbaseParams, Configuration conf, Properties properties)
 			throws SerDeException {
     this.hbaseParams = hbaseParams;
     this.serdeParams = hbaseParams.getSerdeParams();
-    this.columnMappings = hbaseParams.getColumnMappings();
     this.properties = properties;
     this.conf = conf;
 	}
@@ -66,11 +55,6 @@ public class DefaultHBaseValueFactory im
         1, serdeParams.getNullSequence(), serdeParams.isEscaped(), serdeParams.getEscapeChar());
 	}
 
-  @Override
-  public LazyObjectBase createValueObject(ObjectInspector inspector) throws SerDeException {
-    return HBaseSerDeHelper.createLazyField(columnMappings.getColumnsMapping(), fieldID, inspector);
-  }
-
 	@Override
 	public byte[] serializeValue(Object object, StructField field)
 			throws IOException {

Modified: hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/HBaseValueFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/HBaseValueFactory.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/HBaseValueFactory.java (original)
+++ hive/branches/spark/hbase-handler/src/java/org/apache/hadoop/hive/hbase/struct/HBaseValueFactory.java Mon Oct  6 03:44:13 2014
@@ -22,10 +22,8 @@ import java.io.IOException;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.hbase.HBaseKeyFactory;
 import org.apache.hadoop.hive.hbase.HBaseSerDeParameters;
 import org.apache.hadoop.hive.serde2.SerDeException;
-import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -55,13 +53,6 @@ public interface HBaseValueFactory {
   ObjectInspector createValueObjectInspector(TypeInfo type) throws SerDeException;
 
   /**
-   * create custom object for hbase value
-   *
-   * @param inspector OI create by {@link HBaseKeyFactory#createKeyObjectInspector}
-   */
-  LazyObjectBase createValueObject(ObjectInspector inspector) throws SerDeException;
-
-  /**
    * Serialize the given hive object
    * 
    * @param object the object to be serialized

Modified: hive/branches/spark/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java (original)
+++ hive/branches/spark/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java Mon Oct  6 03:44:13 2014
@@ -27,7 +27,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
-import junit.framework.Assert;
 import junit.framework.TestCase;
 
 import org.apache.avro.Schema;
@@ -62,7 +61,6 @@ import org.apache.hadoop.hive.serde2.io.
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
-import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.io.BooleanWritable;
@@ -137,27 +135,6 @@ public class TestHBaseSerDe extends Test
       "  ]\n" +
       "}";
 
-  private static final String EXPECTED_DESERIALIZED_AVRO_STRING =
-      "{\"key\":\"test-row1\",\"cola_avro\":{\"arecord\":{\"int1\":42,\"boolean1\":true,"
-          + "\"long1\":42432234234}}}";
-
-  private static final String EXPECTED_DESERIALIZED_AVRO_STRING_2 =
- "{\"key\":\"test-row1\","
-      + "\"cola_avro\":{\"employeename\":\"Avro Employee1\","
-      + "\"employeeid\":11111,\"age\":25,\"gender\":\"FEMALE\","
-      + "\"contactinfo\":{\"address\":[{\"address1\":\"Avro First Address1\",\"address2\":"
-      + "\"Avro Second Address1\",\"city\":\"Avro City1\",\"zipcode\":123456,\"county\":"
-      + "{0:{\"areacode\":999,\"number\":1234567890}},\"aliases\":null,\"metadata\":"
-      + "{\"testkey\":\"testvalue\"}},{\"address1\":\"Avro First Address1\",\"address2\":"
-      + "\"Avro Second Address1\",\"city\":\"Avro City1\",\"zipcode\":123456,\"county\":"
-      + "{0:{\"areacode\":999,\"number\":1234567890}},\"aliases\":null,\"metadata\":"
-      + "{\"testkey\":\"testvalue\"}}],\"homephone\":{\"areacode\":999,\"number\":1234567890},"
-      + "\"officephone\":{\"areacode\":999,\"number\":1234455555}}}}";
-
-  private static final String EXPECTED_DESERIALIZED_AVRO_STRING_3 =
-      "{\"key\":\"test-row1\",\"cola_avro\":{\"arecord\":{\"int1\":42,\"string1\":\"test\","
-          + "\"boolean1\":true,\"long1\":42432234234}}}";
-
   /**
    * Test the default behavior of the Lazy family of objects and object inspectors.
    */
@@ -1070,8 +1047,7 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroSchemaInline();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
-        EXPECTED_DESERIALIZED_AVRO_STRING);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
   }
 
   private Properties createPropertiesForHiveAvroSchemaInline() {
@@ -1116,8 +1092,7 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroForwardEvolvedSchema();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
-        EXPECTED_DESERIALIZED_AVRO_STRING_3);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
   }
 
   private Properties createPropertiesForHiveAvroForwardEvolvedSchema() {
@@ -1161,8 +1136,7 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroBackwardEvolvedSchema();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
-        EXPECTED_DESERIALIZED_AVRO_STRING);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
   }
 
   private Properties createPropertiesForHiveAvroBackwardEvolvedSchema() {
@@ -1211,8 +1185,7 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroSerClass();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
-        EXPECTED_DESERIALIZED_AVRO_STRING_2);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
   }
 
   private Properties createPropertiesForHiveAvroSerClass() {
@@ -1270,8 +1243,7 @@ public class TestHBaseSerDe extends Test
       Properties tbl = createPropertiesForHiveAvroSchemaUrl(onHDFS);
       serDe.initialize(conf, tbl);
 
-      deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
-          EXPECTED_DESERIALIZED_AVRO_STRING);
+      deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
     } finally {
       // Teardown the cluster
       if (miniDfs != null) {
@@ -1326,8 +1298,7 @@ public class TestHBaseSerDe extends Test
     Properties tbl = createPropertiesForHiveAvroExternalSchema();
     serDe.initialize(conf, tbl);
 
-    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData,
-        EXPECTED_DESERIALIZED_AVRO_STRING_2);
+    deserializeAndSerializeHiveAvro(serDe, r, p, expectedFieldsData);
   }
 
   private Properties createPropertiesForHiveAvroExternalSchema() {
@@ -1418,87 +1389,8 @@ public class TestHBaseSerDe extends Test
     return tbl;
   }
 
-  public void testHBaseSerDeCustomStructValue() throws IOException, SerDeException {
-
-    byte[] cfa = "cola".getBytes();
-    byte[] qualStruct = "struct".getBytes();
-
-    TestStruct testStruct = new TestStruct("A", "B", "C", false, (byte) 0);
-    byte[] key = testStruct.getBytes();
-    // Data
-    List<KeyValue> kvs = new ArrayList<KeyValue>();
-
-    byte[] testData = testStruct.getBytes();
-    kvs.add(new KeyValue(key, cfa, qualStruct, testData));
-
-    Result r = new Result(kvs);
-    byte[] putKey = testStruct.getBytesWithDelimiters();
-
-    Put p = new Put(putKey);
-
-    // Post serialization, separators are automatically inserted between different fields in the
-    // struct. Currently there is not way to disable that. So the work around here is to pad the
-    // data with the separator bytes before creating a "Put" object
-    p.add(new KeyValue(putKey, cfa, qualStruct, Bytes.padTail(testData, 2)));
-
-    // Create, initialize, and test the SerDe
-    HBaseSerDe serDe = new HBaseSerDe();
-    Configuration conf = new Configuration();
-    Properties tbl = createPropertiesForValueStruct();
-    serDe.initialize(conf, tbl);
-
-    deserializeAndSerializeHBaseValueStruct(serDe, r, p);
-
-  }
-
-  private Properties createPropertiesForValueStruct() {
-    Properties tbl = new Properties();
-    tbl.setProperty("cola.struct.serialization.type", "struct");
-    tbl.setProperty("cola.struct.test.value", "test value");
-    tbl.setProperty(HBaseSerDe.HBASE_STRUCT_SERIALIZER_CLASS,
-        "org.apache.hadoop.hive.hbase.HBaseTestStructSerializer");
-    tbl.setProperty(serdeConstants.LIST_COLUMNS, "key,astring");
-    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES,
-        "struct<col1:string,col2:string,col3:string>,struct<col1:string,col2:string,col3:string>");
-    tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cola:struct");
-    tbl.setProperty(HBaseSerDe.HBASE_COMPOSITE_KEY_CLASS,
-        "org.apache.hadoop.hive.hbase.HBaseTestCompositeKey");
-    return tbl;
-  }
-
-  private void deserializeAndSerializeHBaseValueStruct(HBaseSerDe serDe, Result r, Put p)
-      throws SerDeException, IOException {
-    StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
-
-    List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
-
-    Object row = serDe.deserialize(new ResultWritable(r));
-
-    Object fieldData = null;
-    for (int j = 0; j < fieldRefs.size(); j++) {
-      fieldData = soi.getStructFieldData(row, fieldRefs.get(j));
-      assertNotNull(fieldData);
-      if (fieldData instanceof LazyStruct) {
-        assertEquals(((LazyStruct) fieldData).getField(0).toString(), "A");
-        assertEquals(((LazyStruct) fieldData).getField(1).toString(), "B");
-        assertEquals(((LazyStruct) fieldData).getField(2).toString(), "C");
-      } else {
-        Assert.fail("fieldData should be an instance of LazyStruct");
-      }
-    }
-
-    assertEquals(
-        "{\"key\":{\"col1\":\"A\",\"col2\":\"B\",\"col3\":\"C\"},\"astring\":{\"col1\":\"A\",\"col2\":\"B\",\"col3\":\"C\"}}",
-        SerDeUtils.getJSONString(row, soi));
-
-    // Now serialize
-    Put put = ((PutWritable) serDe.serialize(row, soi)).getPut();
-
-    assertEquals("Serialized put:", p.toString(), put.toString());
-  }
-
   private void deserializeAndSerializeHiveAvro(HBaseSerDe serDe, Result r, Put p,
-      Object[] expectedFieldsData, String expectedDeserializedAvroString)
+      Object[] expectedFieldsData)
       throws SerDeException, IOException {
     StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
 
@@ -1511,8 +1403,6 @@ public class TestHBaseSerDe extends Test
       assertNotNull(fieldData);
       assertEquals(expectedFieldsData[j], fieldData.toString().trim());
     }
-    
-    assertEquals(expectedDeserializedAvroString, SerDeUtils.getJSONString(row, soi));
 
     // Now serialize
     Put put = ((PutWritable) serDe.serialize(row, soi)).getPut();

Modified: hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ hive/branches/spark/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Mon Oct  6 03:44:13 2014
@@ -104,7 +104,6 @@ public class HCatSemanticAnalyzer extend
     case HiveParser.TOK_ALTERVIEW_DROPPARTS:
     case HiveParser.TOK_ALTERVIEW_PROPERTIES:
     case HiveParser.TOK_ALTERVIEW_RENAME:
-    case HiveParser.TOK_ALTERVIEW:
     case HiveParser.TOK_CREATEVIEW:
     case HiveParser.TOK_DROPVIEW:
 

Modified: hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java (original)
+++ hive/branches/spark/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java Mon Oct  6 03:44:13 2014
@@ -90,7 +90,6 @@ public class TestHCatPartitionPublish {
     File workDir = handleWorkDir();
     conf.set("yarn.scheduler.capacity.root.queues", "default");
     conf.set("yarn.scheduler.capacity.root.default.capacity", "100");
-    conf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
 
     fs = FileSystem.get(conf);
     System.setProperty("hadoop.log.dir", new File(workDir, "/logs").getAbsolutePath());

Modified: hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml (original)
+++ hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml Mon Oct  6 03:44:13 2014
@@ -77,11 +77,6 @@
             shipped to the target node in the cluster to execute Pig job which uses
             HCat, Hive query, etc.</description>
     </property>
-    <property>
-        <name>templeton.sqoop.path</name>
-        <value>${env.SQOOP_HOME}/bin/sqoop</value>
-        <description>The path to the Sqoop executable.</description>
-    </property>
 
     <property>
         <name>templeton.controller.mr.child.opts</name>

Modified: hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/branches/spark/hcatalog/src/test/e2e/templeton/deployers/env.sh Mon Oct  6 03:44:13 2014
@@ -30,7 +30,7 @@ export PIG_VERSION=0.12.2-SNAPSHOT
 export PROJ_HOME=/Users/${USER}/dev/hive
 export HIVE_HOME=${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin/apache-hive-${HIVE_VERSION}-bin
 export HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
-export SQOOP_HOME=/Users/${USER}/dev/sqoop-1.4.4.bin__hadoop-2.0.4-alpha
+#export SQOOP_HOME=/
 
 #Make sure Pig is built for the Hadoop version you are running
 export PIG_TAR_PATH=/Users/${USER}/dev/pig-${PIG_VERSION}-src/build

Modified: hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java (original)
+++ hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java Mon Oct  6 03:44:13 2014
@@ -32,7 +32,7 @@ public class QueueStatusBean {
   public JobStatus status;
   public JobProfile profile;
 
-  public final String id;
+  public String id;
   public String parentId;
   public String percentComplete;
   public Long exitValue;
@@ -40,11 +40,8 @@ public class QueueStatusBean {
   public String callback;
   public String completed;
   public Map<String, Object> userargs;
-  public String msg;
 
-  public QueueStatusBean(String jobId, String errMsg) {
-    this.id = jobId;
-    this.msg = errMsg;
+  public QueueStatusBean() {
   }
 
   /**

Modified: hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java (original)
+++ hive/branches/spark/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java Mon Oct  6 03:44:13 2014
@@ -1008,15 +1008,8 @@ public class Server {
       jobItem.id = job;
       if (showDetails) {
         StatusDelegator sd = new StatusDelegator(appConf);
-        try {
-          jobItem.detail = sd.run(getDoAsUser(), job);
-        }
-        catch(Exception ex) {
-          /*if we could not get status for some reason, log it, and send empty status back with
-          * just the ID so that caller knows to even look in the log file*/
-          LOG.info("Failed to get status detail for jobId='" + job + "'", ex);
-          jobItem.detail = new QueueStatusBean(job, "Failed to retrieve status; see WebHCat logs");
-        }
+        QueueStatusBean statusBean = sd.run(getDoAsUser(), job);
+        jobItem.detail = statusBean;
       }
       detailList.add(jobItem);
     }

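This hunk reverts the defensive handling in the job-list path: a failed status lookup is no longer caught, logged, and replaced with a stub QueueStatusBean (whose (jobId, errMsg) constructor and msg field are removed above), so any exception from the delegator now propagates to the caller. After the change the showDetails branch reduces to:

    if (showDetails) {
      StatusDelegator sd = new StatusDelegator(appConf);
      // a lookup failure now propagates instead of being logged and masked
      QueueStatusBean statusBean = sd.run(getDoAsUser(), job);
      jobItem.detail = statusBean;
    }
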
Modified: hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java (original)
+++ hive/branches/spark/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java Mon Oct  6 03:44:13 2014
@@ -320,6 +320,6 @@ public class TestTempletonUtils {
     result = TempletonUtils.findContainingJar(FileSystem.class, ".*hadoop.*\\.jar.*");
     Assert.assertNotNull(result);
     result = TempletonUtils.findContainingJar(HadoopShimsSecure.class, ".*unknownjar.*");
-    Assert.assertNull("unexpectedly found jar for HadoopShimsSecure class: " + result, result);
+    Assert.assertNull(result);
   }
 }

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Mon Oct  6 03:44:13 2014
@@ -1115,12 +1115,6 @@ public class TestJdbcDriver extends Test
 
   }
 
-  public void testInvalidUrl() throws SQLException {
-    HiveDriver driver = new HiveDriver();
-
-    assertNull(driver.connect("jdbc:hive2://localhost:1000", null));
-  }
-
   private static void assertDpi(DriverPropertyInfo dpi, String name,
       String value) {
     assertEquals("Invalid DriverPropertyInfo name", name, dpi.name);

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTxns.java Mon Oct  6 03:44:13 2014
@@ -76,21 +76,6 @@ public class TestHiveMetaStoreTxns {
   }
 
   @Test
-  public void testOpenTxnNotExcluded() throws Exception {
-    List<Long> tids = client.openTxns("me", 3).getTxn_ids();
-    Assert.assertEquals(1L, (long) tids.get(0));
-    Assert.assertEquals(2L, (long) tids.get(1));
-    Assert.assertEquals(3L, (long) tids.get(2));
-    client.rollbackTxn(1);
-    client.commitTxn(2);
-    ValidTxnList validTxns = client.getValidTxns(3);
-    Assert.assertFalse(validTxns.isTxnCommitted(1));
-    Assert.assertTrue(validTxns.isTxnCommitted(2));
-    Assert.assertTrue(validTxns.isTxnCommitted(3));
-    Assert.assertFalse(validTxns.isTxnCommitted(4));
-  }
-
-  @Test
   public void testTxnRange() throws Exception {
     ValidTxnList validTxns = client.getValidTxns();
     Assert.assertEquals(ValidTxnList.RangeResponse.NONE,

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveMetaStoreIpAddress.java Mon Oct  6 03:44:13 2014
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.metastore
 import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.StringUtils;
@@ -50,8 +49,6 @@ public class TestRemoteHiveMetaStoreIpAd
 
     int port = MetaStoreUtils.findFreePort();
     System.out.println("Starting MetaStore Server on port " + port);
-    System.setProperty(ConfVars.METASTORE_EVENT_LISTENERS.varname,
-        IpAddressListener.class.getName());
     MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
     isServerStarted = true;
 

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java Mon Oct  6 03:44:13 2014
@@ -139,7 +139,7 @@ public class TestHiveHistory extends Tes
 
       SessionState.start(ss);
 
-      String cmd = "select a.key+1 from src a";
+      String cmd = "select a.key from src a";
       Driver d = new Driver(conf);
       int ret = d.run(cmd).getResponseCode();
       if (ret != 0) {
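
[Editor's note] Only the query text changes here, dropping the key+1 projection. For readers unfamiliar with the embedded Driver API used around it, the pattern is simply run-and-check-the-response-code; a sketch assuming a started session and a loaded src table, as in this test:

      Driver d = new Driver(conf);
      // run() returns a CommandProcessorResponse; non-zero means the command failed.
      int ret = d.run("select a.key from src a").getResponseCode();
      if (ret != 0) {
        fail("command failed with response code " + ret);
      }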

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java Mon Oct  6 03:44:13 2014
@@ -477,31 +477,4 @@ public class TestBeeLineWithArgs {
     final String EXPECTED_PATTERN = "embedded_table";
     testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
   }
-
-  /**
-   * Test Beeline could show the query progress for time-consuming query.
-   * @throws Throwable
-   */
-  @Test
-  public void testQueryProgress() throws Throwable {
-    final String TEST_NAME = "testQueryProgress";
-    final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
-        "select count(*) from " + tableName + ";\n";
-    final String EXPECTED_PATTERN = "Parsing command";
-    testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(JDBC_URL));
-  }
-
-  /**
-   * Test Beeline will hide the query progress when silent option is set.
-   * @throws Throwable
-   */
-  @Test
-  public void testQueryProgressHidden() throws Throwable {
-    final String TEST_NAME = "testQueryProgress";
-    final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
-        "!set silent true\n" +
-        "select count(*) from " + tableName + ";\n";
-    final String EXPECTED_PATTERN = "Parsing command";
-    testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, false, getBaseArgs(JDBC_URL));
-  }
 }
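
[Editor's note] Both deleted tests followed the same shape as the tests that remain in this class: feed a small script to BeeLine via testScriptFile and assert whether a pattern shows up in the output; the boolean argument chooses between expecting the pattern to be present or absent. A sketch of that shape with illustrative script text (not code added by this commit):

  @Test
  public void scriptOutputSketch() throws Throwable {
    final String TEST_NAME = "scriptOutputSketch";
    final String SCRIPT_TEXT = "show tables;\n";
    final String EXPECTED_PATTERN = "embedded_table";
    // true  -> assert EXPECTED_PATTERN appears in BeeLine's output
    // false -> assert it does not appear (e.g. when output is silenced)
    testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(JDBC_URL));
  }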

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Mon Oct  6 03:44:13 2014
@@ -2130,82 +2130,4 @@ public class TestJdbcDriver2 {
     }
     stmt.close();
   }
-
-  /**
-   * Test getting query log method in Jdbc
-   * @throws Exception
-   */
-  @Test
-  public void testGetQueryLog() throws Exception {
-    // Prepare
-    String[] expectedLogs = {
-        "Parsing command",
-        "Parse Completed",
-        "Starting Semantic Analysis",
-        "Semantic Analysis Completed",
-        "Starting command"
-    };
-    String sql = "select count(*) from " + tableName;
-
-    // Verify the fetched log (from the beginning of log file)
-    HiveStatement stmt = (HiveStatement)con.createStatement();
-    assertNotNull("Statement is null", stmt);
-    stmt.executeQuery(sql);
-    List<String> logs = stmt.getQueryLog(false, 10000);
-    stmt.close();
-    verifyFetchedLog(logs, expectedLogs);
-
-    // Verify the fetched log (incrementally)
-    final HiveStatement statement = (HiveStatement)con.createStatement();
-    assertNotNull("Statement is null", statement);
-    statement.setFetchSize(10000);
-    final List<String> incrementalLogs = new ArrayList<String>();
-
-    Runnable logThread = new Runnable() {
-      @Override
-      public void run() {
-        while (statement.hasMoreLogs()) {
-          try {
-            incrementalLogs.addAll(statement.getQueryLog());
-            Thread.sleep(500);
-          } catch (SQLException e) {
-            LOG.error("Failed getQueryLog. Error message: " + e.getMessage());
-            fail("error in getting log thread");
-          } catch (InterruptedException e) {
-            LOG.error("Getting log thread is interrupted. Error message: " + e.getMessage());
-            fail("error in getting log thread");
-          }
-        }
-      }
-    };
-
-    Thread thread = new Thread(logThread);
-    thread.setDaemon(true);
-    thread.start();
-    statement.executeQuery(sql);
-    thread.interrupt();
-    thread.join(10000);
-    // fetch remaining logs
-    List<String> remainingLogs;
-    do {
-      remainingLogs = statement.getQueryLog();
-      incrementalLogs.addAll(remainingLogs);
-    } while (remainingLogs.size() > 0);
-    statement.close();
-
-    verifyFetchedLog(incrementalLogs, expectedLogs);
-  }
-
-  private void verifyFetchedLog(List<String> logs, String[] expectedLogs) {
-    StringBuilder stringBuilder = new StringBuilder();
-
-    for (String log : logs) {
-      stringBuilder.append(log);
-    }
-
-    String accumulatedLogs = stringBuilder.toString();
-    for (String expectedLog : expectedLogs) {
-      assertTrue(accumulatedLogs.contains(expectedLog));
-    }
-  }
 }
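
[Editor's note] The deleted testGetQueryLog exercised HiveStatement's operation-log API in two ways: one bulk fetch after the query, and one incremental fetch from a background thread while the query runs. A condensed sketch of the incremental pattern, using only the calls visible in the removed code (con and sql are assumed to be an open connection and a query string, as in the test):

    final HiveStatement stmt = (HiveStatement) con.createStatement();
    final List<String> logs = new ArrayList<String>();
    Thread poller = new Thread(new Runnable() {
      @Override
      public void run() {
        // Keep draining while the server reports more operation log.
        while (stmt.hasMoreLogs()) {
          try {
            logs.addAll(stmt.getQueryLog());
            Thread.sleep(500);
          } catch (Exception e) {
            break;
          }
        }
      }
    });
    poller.setDaemon(true);
    poller.start();
    stmt.executeQuery(sql);
    poller.interrupt();
    poller.join(10000);
    // Pick up anything emitted after the query completed.
    List<String> tail;
    do {
      tail = stmt.getQueryLog();
      logs.addAll(tail);
    } while (!tail.isEmpty());
    stmt.close();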

Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java Mon Oct  6 03:44:13 2014
@@ -120,10 +120,9 @@ public class TestHS2AuthzContext {
     HiveAuthzContext context = contextCapturer.getValue();
 
     assertEquals("Command ", ctxCmd, context.getCommandString());
-    assertTrue("ip address pattern check", context.getIpAddress().matches("[.:a-fA-F0-9]+"));
+    assertTrue("ip address pattern check", context.getIpAddress().contains("."));
     // ip address size check - check for something better than non zero
     assertTrue("ip address size check", context.getIpAddress().length() > 7);
-
   }
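
[Editor's note] The assertion is relaxed from a character-class match to a simple substring check. For comparison, the before and after checks side by side (the old regex also admitted colon-separated IPv6 literals, while contains(".") effectively assumes a dotted IPv4-style address):

    // before: hex digits, dots and colons only (IPv4 or IPv6 shaped)
    assertTrue("ip address pattern check", context.getIpAddress().matches("[.:a-fA-F0-9]+"));
    // after: any address containing a dot
    assertTrue("ip address pattern check", context.getIpAddress().contains("."));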
 
   private Connection getConnection(String userName) throws SQLException {

Modified: hive/branches/spark/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/src/test/resources/testconfiguration.properties?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/spark/itests/src/test/resources/testconfiguration.properties Mon Oct  6 03:44:13 2014
@@ -55,7 +55,6 @@ minitez.query.files.shared=alter_merge_2
   bucket2.q,\
   bucket3.q,\
   bucket4.q,\
-  cbo_correctness.q,\
   correlationoptimizer1.q,\
   count.q,\
   create_merge_compressed.q,\
@@ -154,14 +153,11 @@ minitez.query.files.shared=alter_merge_2
   update_where_partitioned.q,\
   update_two_cols.q,\
   vector_cast_constant.q,\
-  vector_char_simple.q,\
   vector_data_types.q,\
   vector_decimal_aggregate.q,\
   vector_left_outer_join.q,\
   vector_mapjoin_reduce.q,\
   vector_string_concat.q,\
-  vector_varchar_simple.q,\
-  vectorization_0.q,\
   vectorization_12.q,\
   vectorization_13.q,\
   vectorization_14.q,\
@@ -173,22 +169,7 @@ minitez.query.files.shared=alter_merge_2
   vectorized_nested_mapjoin.q,\
   vectorized_ptf.q,\
   vectorized_shufflejoin.q,\
-  vectorized_timestamp_funcs.q,\
-  auto_sortmerge_join_1.q,\
-  auto_sortmerge_join_10.q,\
-  auto_sortmerge_join_11.q,\
-  auto_sortmerge_join_12.q,\
-  auto_sortmerge_join_13.q,\
-  auto_sortmerge_join_14.q,\
-  auto_sortmerge_join_15.q,\
-  auto_sortmerge_join_16.q,\
-  auto_sortmerge_join_2.q,\
-  auto_sortmerge_join_3.q,\
-  auto_sortmerge_join_4.q,\
-  auto_sortmerge_join_5.q,\
-  auto_sortmerge_join_7.q,\
-  auto_sortmerge_join_8.q,\
-  auto_sortmerge_join_9.q
+  vectorized_timestamp_funcs.q
 
 minitez.query.files=bucket_map_join_tez1.q,\
   bucket_map_join_tez2.q,\
@@ -205,11 +186,7 @@ minitez.query.files=bucket_map_join_tez1
   tez_joins_explain.q,\
   tez_schema_evolution.q,\
   tez_union.q,\
-  tez_union_decimal.q,\
-  tez_union_group_by.q,\
-  tez_smb_main.q,\
-  tez_smb_1.q,\
-  vectorized_dynamic_partition_pruning.q
+  tez_union_decimal.q
 
 beeline.positive.exclude=add_part_exist.q,\
   alter1.q,\
@@ -365,7 +342,6 @@ beeline.positive.exclude=add_part_exist.
 
 minimr.query.negative.files=cluster_tasklog_retrieval.q,\
   file_with_header_footer_negative.q,\
-  local_mapred_error_cache.q,\
   mapreduce_stack_trace.q,\
   mapreduce_stack_trace_hadoop20.q,\
   mapreduce_stack_trace_turnoff.q,\

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Mon Oct  6 03:44:13 2014
@@ -690,10 +690,7 @@ public class QTestUtil {
     // conf.logVars(System.out);
     // System.out.flush();
 
-    String execEngine = conf.get("hive.execution.engine");
-    conf.set("hive.execution.engine", "mr");
     SessionState.start(conf);
-    conf.set("hive.execution.engine", execEngine);
     db = Hive.get(conf);
     fs = FileSystem.get(conf);
     drv = new Driver(conf);
@@ -774,8 +771,6 @@ public class QTestUtil {
     HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
         "org.apache.hadoop.hive.ql.security.DummyAuthenticator");
 
-    String execEngine = conf.get("hive.execution.engine");
-    conf.set("hive.execution.engine", "mr");
     CliSessionState ss = new CliSessionState(conf);
     assert ss != null;
     ss.in = System.in;
@@ -793,7 +788,6 @@ public class QTestUtil {
 
     isSessionStateStarted = true;
 
-    conf.set("hive.execution.engine", execEngine);
     return ss;
   }
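
[Editor's note] Both hunks in this file remove the same save/override/restore dance around session startup. Shown once in plain form so the two hunks are easier to read (this mirrors the deleted lines; it is not code the commit introduces):

    // Remember the configured engine, force plain MapReduce just for
    // SessionState.start(), then restore the caller's setting.
    String execEngine = conf.get("hive.execution.engine");
    conf.set("hive.execution.engine", "mr");
    SessionState.start(conf);
    conf.set("hive.execution.engine", execEngine);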
 

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java Mon Oct  6 03:44:13 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDAF;
 import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -33,8 +32,6 @@ import org.apache.hadoop.io.Text;
  * UDAFTestMax.
  *
  */
-@Description(name = "test_max",
-value = "_FUNC_(col) - UDF to report Max Value")
 public class UDAFTestMax extends UDAF {
 
   /**
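
[Editor's note] This file and the UDF/UDAF test classes that follow all lose the same kind of @Description annotation in this commit. For readers unfamiliar with it, a minimal sketch of the annotated form being dropped (taken from the hunk above; evaluator inner classes are omitted because only the annotation is being illustrated):

import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDAF;

// name and value feed DESCRIBE FUNCTION output; _FUNC_ is replaced by
// the name under which the function is registered.
@Description(name = "test_max",
    value = "_FUNC_(col) - UDF to report Max Value")
public class UDAFTestMax extends UDAF {
}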

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java Mon Oct  6 03:44:13 2014
@@ -27,7 +27,6 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -35,8 +34,6 @@ import org.apache.hadoop.io.Text;
 /**
  * A UDF for testing, which does key/value lookup from a file
  */
-@Description(name = "lookup",
-value = "_FUNC_(col) - UDF for key/value lookup from a file")
 public class UDFFileLookup extends UDF {
   static Log LOG = LogFactory.getLog(UDFFileLookup.class);
 

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java Mon Oct  6 03:44:13 2014
@@ -18,14 +18,11 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * A UDF for testing, which throws a RuntimeException if the expression evaluates to false.
  */
-@Description(name = "test_error",
-value = "_FUNC_(col) - UDF throws RuntimeException if  expression evaluates to false")
 public class UDFTestErrorOnFalse extends UDF {
 
   public int evaluate(Boolean b) {

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java Mon Oct  6 03:44:13 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
@@ -26,8 +25,6 @@ import org.apache.hadoop.io.Text;
 /**
  * A UDF for testing, which evaluates the length of a string.
  */
-@Description(name = "testlength",
-value = "_FUNC_(col) - UDF evaluates the length of the string")
 public class UDFTestLength extends UDF {
 
   IntWritable result = new IntWritable();

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java Mon Oct  6 03:44:13 2014
@@ -18,15 +18,12 @@
 
 package org.apache.hadoop.hive.ql.udf;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 /**
  * A UDF for testing, which evaluates the length of a string. This UDF uses Java
  * Primitive classes for parameters.
  */
-@Description(name = "testlength2",
-value = "_FUNC_(col) - UDF evaluates the length of the string and returns value as Java Integer")
 public class UDFTestLength2 extends UDF {
 
   public Integer evaluate(String s) {

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java Mon Oct  6 03:44:13 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.MapredContext;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -27,8 +26,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.Reporter;
-@Description(name = "counter",
-value = "_FUNC_(col) - UDF to report MR counter values")
+
 public class DummyContextUDF extends GenericUDF {
 
   private MapredContext context;

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java Mon Oct  6 03:44:13 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -28,8 +27,6 @@ import org.apache.hadoop.hive.serde2.obj
 /**
  * A test GenericUDF to return native Java's boolean type
  */
-@Description(name = "test_udf_get_java_boolean",
-value = "_FUNC_(str) - GenericUDF to return native Java's boolean type")
 public class GenericUDFTestGetJavaBoolean extends GenericUDF {
   ObjectInspector[] argumentOIs;
 

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java Mon Oct  6 03:44:13 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -28,8 +27,6 @@ import org.apache.hadoop.hive.serde2.obj
 /**
  * A test GenericUDF to return native Java's string type
  */
-@Description(name = "test_udf_get_java_string",
-value = "_FUNC_(str) - GenericUDF to return native Java's string type")
 public class GenericUDFTestGetJavaString extends GenericUDF {
   ObjectInspector[] argumentOIs;
 

Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Mon Oct  6 03:44:13 2014
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.udf.ge
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -35,8 +34,6 @@ import org.apache.hadoop.io.Text;
 /**
  * Mimics oracle's function translate(str1, str2, str3).
  */
-@Description(name = "test_translate",
-value = "_FUNC_(str1, str2, str3) - Mimics oracle's function translate(str1, str2, str3)")
 public class GenericUDFTestTranslate extends GenericUDF {
   private transient ObjectInspector[] argumentOIs;
 

Modified: hive/branches/spark/jdbc/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/pom.xml?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/jdbc/pom.xml (original)
+++ hive/branches/spark/jdbc/pom.xml Mon Oct  6 03:44:13 2014
@@ -41,14 +41,8 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-service</artifactId>
+      <artifactId>hive-metastore</artifactId>
       <version>${project.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.hive</groupId>
-            <artifactId>hive-exec</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
@@ -57,12 +51,12 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-metastore</artifactId>
+      <artifactId>hive-service</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
-      <artifactId>hive-shims</artifactId>
+      <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>
     </dependency>
     <!-- inter-project -->
@@ -141,39 +135,8 @@
               <minimizeJar>true</minimizeJar>
               <shadedArtifactAttached>true</shadedArtifactAttached>
               <shadedClassifierName>${hive.jdbc.driver.classifier}</shadedClassifierName>
-              <filters>
-                <filter>
-                  <artifact>org.apache.hive.shims:hive-shims-common</artifact>
-                  <includes>
-                    <include>org/apache/hadoop/hive/shims/*</include>
-                    <include>org/apache/hadoop/hive/thrift/*</include>
-                  </includes>
-                </filter>
-                <filter>
-                  <artifact>org.apache.hive.shims:hive-shims-common-secure</artifact>
-                  <includes>
-                    <include>org/apache/hadoop/hive/thrift/*</include>
-                    <include>org/apache/hadoop/hive/thrift/client/*</include>
-                  </includes>
-                </filter>
-                <filter>
-                  <artifact>org.apache.hive.shims:hive-shims-0.23</artifact>
-                  <includes>
-                    <include>org/apache/hadoop/hive/thrift/*</include>
-                  </includes>
-                </filter>
-                <filter>
-                  <artifact>*:*</artifact>
-                  <excludes>
-                    <exclude>META-INF/*.SF</exclude>
-                    <exclude>META-INF/*.DSA</exclude>
-                    <exclude>META-INF/*.RSA</exclude>
-                  </excludes>
-                </filter>
-              </filters>
               <artifactSet>
                 <excludes>
-                  <exclude>org.apache.commons:commons-compress</exclude>
                   <exclude>org.apache.hadoop:*</exclude>
                   <exclude>org.apache.hive:hive-ant</exclude>
                   <exclude>org.apache.ant:*</exclude>
@@ -187,16 +150,23 @@
                   <exclude>org.tukaani:*</exclude>
                   <exclude>org.iq80.snappy:*</exclude>
                   <exclude>org.apache.velocity:*</exclude>
-                  <exclude>net.sf.jpam:*</exclude>
-                  <exclude>org.apache.avro:*</exclude>
-                  <exclude>net.sf.opencsv:*</exclude>
-                  <exclude>org.antlr:*</exclude>
                 </excludes>
-              </artifactSet>       
+              </artifactSet>
+              <filters>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                  </excludes>
+                </filter>
+              </filters>
             </configuration>
           </execution>
         </executions>
       </plugin>
     </plugins>
   </build>
+
 </project>

Modified: hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java Mon Oct  6 03:44:13 2014
@@ -102,9 +102,8 @@ public class HiveDriver implements Drive
     return Pattern.matches(URL_PREFIX + ".*", url);
   }
 
-  @Override
   public Connection connect(String url, Properties info) throws SQLException {
-    return acceptsURL(url) ? new HiveConnection(url, info) : null;
+    return new HiveConnection(url, info);
   }
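
[Editor's note] The removed guard matters for the java.sql.Driver contract: connect() is expected to return null for a URL the driver does not accept, rather than trying (and failing) to build a connection. The guarded form that this change drops looked like this:

  @Override
  public Connection connect(String url, Properties info) throws SQLException {
    // Return null for foreign URLs so DriverManager can try other drivers.
    return acceptsURL(url) ? new HiveConnection(url, info) : null;
  }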
 
   /**

Modified: hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java Mon Oct  6 03:44:13 2014
@@ -28,7 +28,6 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -76,7 +75,6 @@ public class HiveQueryResultSet extends 
   private boolean fetchFirst = false;
 
   private final TProtocolVersion protocol;
-  private ReentrantLock transportLock;
 
 
   public static class Builder {
@@ -100,7 +98,6 @@ public class HiveQueryResultSet extends 
     private int fetchSize = 50;
     private boolean emptyResultSet = false;
     private boolean isScrollable = false;
-    private ReentrantLock transportLock = null;
 
     public Builder(Statement statement) throws SQLException {
       this.statement = statement;
@@ -169,11 +166,6 @@ public class HiveQueryResultSet extends 
       return this;
     }
 
-    public Builder setTransportLock(ReentrantLock transportLock) {
-      this.transportLock = transportLock;
-      return this;
-    }
-
     public HiveQueryResultSet build() throws SQLException {
       return new HiveQueryResultSet(this);
     }
@@ -189,7 +181,6 @@ public class HiveQueryResultSet extends 
     this.stmtHandle = builder.stmtHandle;
     this.sessHandle = builder.sessHandle;
     this.fetchSize = builder.fetchSize;
-    this.transportLock = builder.transportLock;
     columnNames = new ArrayList<String>();
     columnTypes = new ArrayList<String>();
     columnAttributes = new ArrayList<JdbcColumnAttributes>();
@@ -248,17 +239,7 @@ public class HiveQueryResultSet extends 
     try {
       TGetResultSetMetadataReq metadataReq = new TGetResultSetMetadataReq(stmtHandle);
       // TODO need session handle
-      TGetResultSetMetadataResp  metadataResp;
-      if (transportLock == null) {
-        metadataResp = client.GetResultSetMetadata(metadataReq);
-      } else {
-        transportLock.lock();
-        try {
-          metadataResp = client.GetResultSetMetadata(metadataReq);
-        } finally {
-          transportLock.unlock();
-        }
-      }
+      TGetResultSetMetadataResp  metadataResp = client.GetResultSetMetadata(metadataReq);
       Utils.verifySuccess(metadataResp.getStatus());
 
       StringBuilder namesSb = new StringBuilder();
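
[Editor's note] Both RPC call sites in this class (result-set metadata and fetch) lose the same optional transport-lock wrapper. The removed pattern, shown once in plain form for comparison (names follow the surrounding code; this is not new code in the commit):

      TGetResultSetMetadataResp metadataResp;
      if (transportLock == null) {
        // No shared lock supplied: call the Thrift client directly.
        metadataResp = client.GetResultSetMetadata(metadataReq);
      } else {
        // Serialize access to the shared transport across statements.
        transportLock.lock();
        try {
          metadataResp = client.GetResultSetMetadata(metadataReq);
        } finally {
          transportLock.unlock();
        }
      }
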
@@ -345,17 +326,7 @@ public class HiveQueryResultSet extends 
       if (fetchedRows == null || !fetchedRowsItr.hasNext()) {
         TFetchResultsReq fetchReq = new TFetchResultsReq(stmtHandle,
             orientation, fetchSize);
-        TFetchResultsResp fetchResp;
-        if (transportLock == null) {
-          fetchResp = client.FetchResults(fetchReq);
-        } else {
-          transportLock.lock();
-          try {
-            fetchResp = client.FetchResults(fetchReq);
-          } finally {
-            transportLock.unlock();
-          }
-        }
+        TFetchResultsResp fetchResp = client.FetchResults(fetchReq);
         Utils.verifySuccessWithInfo(fetchResp.getStatus());
 
         TRowSet results = fetchResp.getResults();