You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/17 07:28:45 UTC

svn commit: r1625461 [2/2] - in /hive/branches/cbo: ./ common/src/java/org/apache/hadoop/hive/conf/ data/files/ itests/hive-unit/src/test/java/org/apache/hive/beeline/ itests/hive-unit/src/test/java/org/apache/hive/jdbc/ itests/src/test/resources/ jdbc...

Modified: hive/branches/cbo/ql/src/test/queries/clientpositive/vector_decimal_aggregate.q
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/queries/clientpositive/vector_decimal_aggregate.q?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/queries/clientpositive/vector_decimal_aggregate.q (original)
+++ hive/branches/cbo/ql/src/test/queries/clientpositive/vector_decimal_aggregate.q Wed Sep 17 05:28:43 2014
@@ -1,20 +1,35 @@
 CREATE TABLE decimal_vgby STORED AS ORC AS 
-	SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
-	CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
-	cint
-	FROM alltypesorc;
+    SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+    CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+    cint
+    FROM alltypesorc;
 
 SET hive.vectorized.execution.enabled=true;
 
+-- First only do simple aggregations that output primitives only
 EXPLAIN SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1;
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1;
 SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1;
\ No newline at end of file
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1;
+
+-- Now add the others...
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1;
+SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1;
\ No newline at end of file

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out Wed Sep 17 05:28:43 2014
@@ -1,34 +1,141 @@
 PREHOOK: query: CREATE TABLE decimal_vgby STORED AS ORC AS 
-	SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
-	CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
-	cint
-	FROM alltypesorc
+    SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+    CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+    cint
+    FROM alltypesorc
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@decimal_vgby
 POSTHOOK: query: CREATE TABLE decimal_vgby STORED AS ORC AS 
-	SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
-	CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
-	cint
-	FROM alltypesorc
+    SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+    CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+    cint
+    FROM alltypesorc
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@alltypesorc
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@decimal_vgby
-PREHOOK: query: EXPLAIN SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1
+PREHOOK: query: -- First only do simple aggregations that output primitives only
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1
+POSTHOOK: query: -- First only do simple aggregations that output primitives only
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: decimal_vgby
+                  Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14))
+                    outputColumnNames: cint, cdecimal1, cdecimal2
+                    Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count()
+                      keys: cint (type: int)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
+                      Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)), _col9 (type: bigint)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), sum(VALUE._col3), count(VALUE._col4), max(VALUE._col5), min(VALUE._col6), sum(VALUE._col7), count(VALUE._col8)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
+                Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: (_col9 > 1) (type: boolean)
+                  Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14))
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+                    Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            Execution mode: vectorized
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_vgby
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_vgby
+#### A masked pattern was here ####
+0	3072	9318.4351351351	-4298.1513513514	5018444.1081079808	3072	11160.71538461538500	-5147.90769230769300	6010604.30769230735360
+-3728	6	5831542.2692483780	-3367.6517567568	5817556.0411483778	6	6984454.21109769200000	-4033.445769230769	6967702.86724384584710
+-563	2	-515.6210729730	-3367.6517567568	-3883.2728297298	2	-617.56077692307690	-4033.445769230769	-4651.00654615384590
+762	2	5831542.2692483780	1531.2194054054	5833073.4886537834	2	6984454.21109769200000	1833.9456923076925	6986288.15678999969250
+6981	3	5831542.269248378	-515.6210729730	5830511.0271024320	3	6984454.211097692	-617.56077692307690	6983219.08954384584620
+253665376	1024	9767.0054054054	-9779.5486486487	-347484.0818378374	1024	11697.96923076923100	-11712.99230769231000	-416182.64030769233089
+528534767	1024	5831542.2692483780	-9777.1594594595	11646372.8607481068	1024	6984454.21109769200000	-11710.13076923077100	13948892.79980307629003
+626923679	1024	9723.4027027027	-9778.9513513514	10541.0525297287	1024	11645.74615384615400	-11712.27692307692300	12625.04759999997746
+PREHOOK: query: -- Now add the others...
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Now add the others...
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -93,20 +200,20 @@ STAGE PLANS:
         ListSink
 
 PREHOOK: query: SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_vgby
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_vgby
 #### A masked pattern was here ####

Modified: hive/branches/cbo/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out (original)
+++ hive/branches/cbo/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out Wed Sep 17 05:28:43 2014
@@ -1,34 +1,134 @@
 PREHOOK: query: CREATE TABLE decimal_vgby STORED AS ORC AS 
-	SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
-	CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
-	cint
-	FROM alltypesorc
+    SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+    CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+    cint
+    FROM alltypesorc
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@alltypesorc
 PREHOOK: Output: database:default
 PREHOOK: Output: default@decimal_vgby
 POSTHOOK: query: CREATE TABLE decimal_vgby STORED AS ORC AS 
-	SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
-	CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
-	cint
-	FROM alltypesorc
+    SELECT cdouble, CAST (((cdouble*22.1)/37) AS DECIMAL(20,10)) AS cdecimal1, 
+    CAST (((cdouble*9.3)/13) AS DECIMAL(23,14)) AS cdecimal2,
+    cint
+    FROM alltypesorc
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@alltypesorc
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@decimal_vgby
-PREHOOK: query: EXPLAIN SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1
+PREHOOK: query: -- First only do simple aggregations that output primitives only
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1
+POSTHOOK: query: -- First only do simple aggregations that output primitives only
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: decimal_vgby
+            Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14))
+              outputColumnNames: cint, cdecimal1, cdecimal2
+              Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count()
+                keys: cint (type: int)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
+                Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: int)
+                  Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)), _col9 (type: bigint)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0), max(VALUE._col1), min(VALUE._col2), sum(VALUE._col3), count(VALUE._col4), max(VALUE._col5), min(VALUE._col6), sum(VALUE._col7), count(VALUE._col8)
+          keys: KEY._col0 (type: int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
+          Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (_col9 > 1) (type: boolean)
+            Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14))
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
+              Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@decimal_vgby
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@decimal_vgby
+#### A masked pattern was here ####
+NULL	3072	9318.4351351351	-4298.1513513514	5018444.1081079808	3072	11160.71538461538500	-5147.90769230769300	6010604.30769230735360
+-3728	6	5831542.2692483780	-3367.6517567568	5817556.0411483778	6	6984454.21109769200000	-4033.445769230769	6967702.86724384584710
+-563	2	-515.6210729730	-3367.6517567568	-3883.2728297298	2	-617.56077692307690	-4033.445769230769	-4651.00654615384590
+762	2	5831542.2692483780	1531.2194054054	5833073.4886537834	2	6984454.21109769200000	1833.9456923076925	6986288.15678999969250
+6981	3	5831542.269248378	-515.6210729730	5830511.0271024320	3	6984454.211097692	-617.56077692307690	6983219.08954384584620
+253665376	1024	9767.0054054054	-9779.5486486487	-347484.0818378374	1024	11697.96923076923100	-11712.99230769231000	-416182.64030769233089
+528534767	1024	5831542.2692483780	-9777.1594594595	11646372.8607481068	1024	6984454.21109769200000	-11710.13076923077100	13948892.79980307629003
+626923679	1024	9723.4027027027	-9778.9513513514	10541.0525297287	1024	11645.74615384615400	-11712.27692307692300	12625.04759999997746
+PREHOOK: query: -- Now add the others...
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Now add the others...
+EXPLAIN SELECT cint,
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -87,20 +187,20 @@ STAGE PLANS:
         ListSink
 
 PREHOOK: query: SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@decimal_vgby
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT cint,
-	COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
-	COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
-	FROM decimal_vgby
-	GROUP BY cint
-	HAVING COUNT(*) > 1
+    COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
+    COUNT(cdecimal2), MAX(cdecimal2), MIN(cdecimal2), SUM(cdecimal2), AVG(cdecimal2), STDDEV_POP(cdecimal2), STDDEV_SAMP(cdecimal2)
+    FROM decimal_vgby
+    GROUP BY cint
+    HAVING COUNT(*) > 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_vgby
 #### A masked pattern was here ####

Modified: hive/branches/cbo/serde/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/cbo/serde/pom.xml?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/serde/pom.xml (original)
+++ hive/branches/cbo/serde/pom.xml Wed Sep 17 05:28:43 2014
@@ -70,7 +70,13 @@
       <artifactId>libthrift</artifactId>
       <version>${libthrift.version}</version>
     </dependency>
-    <!-- test inter-project -->
+    <dependency>
+      <groupId>net.sf.opencsv</groupId>
+      <artifactId>opencsv</artifactId>
+      <version>${opencsv.version}</version>
+    </dependency>
+
+      <!-- test inter-project -->
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/CLIService.java Wed Sep 17 05:28:43 2014
@@ -66,7 +66,7 @@ public class CLIService extends Composit
   private UserGroupInformation httpUGI;
 
   public CLIService() {
-    super("CLIService");
+    super(CLIService.class.getSimpleName());
   }
 
   @Override
@@ -201,8 +201,7 @@ public class CLIService extends Composit
    * @see org.apache.hive.service.cli.ICLIService#closeSession(org.apache.hive.service.cli.SessionHandle)
    */
   @Override
-  public void closeSession(SessionHandle sessionHandle)
-      throws HiveSQLException {
+  public void closeSession(SessionHandle sessionHandle) throws HiveSQLException {
     sessionManager.closeSession(sessionHandle);
     LOG.debug(sessionHandle + ": closeSession()");
   }
@@ -470,4 +469,8 @@ public class CLIService extends Composit
     sessionManager.getSession(sessionHandle).renewDelegationToken(authFactory, tokenStr);
     LOG.info(sessionHandle  + ": renewDelegationToken()");
   }
+
+  public SessionManager getSessionManager() {
+    return sessionManager;
+  }
 }

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java Wed Sep 17 05:28:43 2014
@@ -47,7 +47,7 @@ public class OperationManager extends Ab
       new HashMap<OperationHandle, Operation>();
 
   public OperationManager() {
-    super("OperationManager");
+    super(OperationManager.class.getSimpleName());
   }
 
   @Override

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java Wed Sep 17 05:28:43 2014
@@ -166,8 +166,8 @@ public class HiveSessionImpl implements 
     IHiveFileProcessor processor = new GlobalHivercFileProcessor();
 
     try {
-      if (hiveConf.getVar(ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION) != null) {
-        String hiverc = hiveConf.getVar(ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION)
+      if (hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION) != null) {
+        String hiverc = hiveConf.getVar(ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION)
             + File.separator + SessionManager.HIVERCFILE;
         if (new File(hiverc).exists()) {
           LOG.info("Running global init file: " + hiverc);

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/SessionManager.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/SessionManager.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/SessionManager.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/session/SessionManager.java Wed Sep 17 05:28:43 2014
@@ -67,7 +67,7 @@ public class SessionManager extends Comp
   private volatile boolean shutdown;
 
   public SessionManager() {
-    super("SessionManager");
+    super(SessionManager.class.getSimpleName());
   }
 
   @Override
@@ -356,5 +356,9 @@ public class SessionManager extends Comp
     return backgroundOperationPool.submit(r);
   }
 
+  public int getOpenSessionCount() {
+    return handleToSession.size();
+  }
+
 }
 

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java Wed Sep 17 05:28:43 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hive.service.cli.thrift;
 
-import java.net.InetSocketAddress;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.SynchronousQueue;
 import java.util.concurrent.ThreadPoolExecutor;
@@ -40,72 +39,54 @@ import org.apache.thrift.transport.TTran
 public class ThriftBinaryCLIService extends ThriftCLIService {
 
   public ThriftBinaryCLIService(CLIService cliService) {
-    super(cliService, "ThriftBinaryCLIService");
+    super(cliService, ThriftBinaryCLIService.class.getSimpleName());
   }
 
   @Override
   public void run() {
     try {
-      hiveAuthFactory = new HiveAuthFactory(hiveConf);
-      TTransportFactory  transportFactory = hiveAuthFactory.getAuthTransFactory();
-      TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
-
-      String portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
-      if (portString != null) {
-        portNum = Integer.valueOf(portString);
-      } else {
-        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
-      }
-
-      String hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
-      if (hiveHost == null) {
-        hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
-      }
-
-      if (hiveHost != null && !hiveHost.isEmpty()) {
-        serverAddress = new InetSocketAddress(hiveHost, portNum);
-      } else {
-        serverAddress = new  InetSocketAddress(portNum);
-      }
-
-      minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
-      maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
-      workerKeepAliveTime = hiveConf.getTimeVar(
-          ConfVars.HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME, TimeUnit.SECONDS);
+      // Server thread pool
       String threadPoolName = "HiveServer2-Handler-Pool";
       ExecutorService executorService = new ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads,
           workerKeepAliveTime, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
           new ThreadFactoryWithGarbageCleanup(threadPoolName));
 
+      // Thrift configs
+      hiveAuthFactory = new HiveAuthFactory(hiveConf);
+      TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory();
+      TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
       TServerSocket serverSocket = null;
       if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
         serverSocket = HiveAuthFactory.getServerSocket(hiveHost, portNum);
       } else {
         String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
         if (keyStorePath.isEmpty()) {
-          throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname +
-              " Not configured for SSL connection");
+          throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname
+              + " Not configured for SSL connection");
         }
         String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf,
             HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
-        serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum,
-            keyStorePath, keyStorePassword);
+        serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum, keyStorePath,
+            keyStorePassword);
       }
+
+      // Server args
       TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket)
-      .processorFactory(processorFactory)
-      .transportFactory(transportFactory)
-      .protocolFactory(new TBinaryProtocol.Factory())
-      .executorService(executorService);
+          .processorFactory(processorFactory).transportFactory(transportFactory)
+          .protocolFactory(new TBinaryProtocol.Factory()).executorService(executorService);
 
+      // TCP Server
       server = new TThreadPoolServer(sargs);
-
-      LOG.info("ThriftBinaryCLIService listening on " + serverAddress);
-
       server.serve();
-
+      String msg = "Started " + ThriftBinaryCLIService.class.getSimpleName() + " on port "
+          + portNum + " with " + minWorkerThreads + "..." + maxWorkerThreads + " worker threads";
+      LOG.info(msg);
     } catch (Throwable t) {
-      LOG.error("Error: ", t);
+      LOG.fatal(
+          "Error starting HiveServer2: could not start "
+              + ThriftBinaryCLIService.class.getSimpleName(), t);
+      System.exit(-1);
     }
-
   }
+
 }

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java Wed Sep 17 05:28:43 2014
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.TimeUnit;
 
 import javax.security.auth.login.LoginException;
 
@@ -34,6 +35,7 @@ import org.apache.hive.service.auth.Hive
 import org.apache.hive.service.auth.TSetIpAddressProcessor;
 import org.apache.hive.service.cli.*;
 import org.apache.hive.service.cli.session.SessionManager;
+import org.apache.hive.service.server.HiveServer2;
 import org.apache.thrift.TException;
 import org.apache.thrift.server.TServer;
 
@@ -48,9 +50,11 @@ public abstract class ThriftCLIService e
   protected CLIService cliService;
   private static final TStatus OK_STATUS = new TStatus(TStatusCode.SUCCESS_STATUS);
   private static final TStatus ERROR_STATUS = new TStatus(TStatusCode.ERROR_STATUS);
+  protected static HiveAuthFactory hiveAuthFactory;
 
   protected int portNum;
   protected InetSocketAddress serverAddress;
+  protected String hiveHost;
   protected TServer server;
   protected org.eclipse.jetty.server.Server httpServer;
 
@@ -62,8 +66,7 @@ public abstract class ThriftCLIService e
   protected int minWorkerThreads;
   protected int maxWorkerThreads;
   protected long workerKeepAliveTime;
-
-  protected static HiveAuthFactory hiveAuthFactory;
+  private HiveServer2 hiveServer2;
 
   public ThriftCLIService(CLIService cliService, String serviceName) {
     super(serviceName);
@@ -73,6 +76,43 @@ public abstract class ThriftCLIService e
   @Override
   public synchronized void init(HiveConf hiveConf) {
     this.hiveConf = hiveConf;
+
+    // Initialize common server configs needed in both binary & http modes
+    String portString;
+    hiveHost = System.getenv("HIVE_SERVER2_THRIFT_BIND_HOST");
+    if (hiveHost == null) {
+      hiveHost = hiveConf.getVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
+    }
+    // HTTP mode
+    if (HiveServer2.isHTTPTransportMode(hiveConf)) {
+      workerKeepAliveTime =
+          hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME,
+              TimeUnit.SECONDS);
+      portString = System.getenv("HIVE_SERVER2_THRIFT_HTTP_PORT");
+      if (portString != null) {
+        portNum = Integer.valueOf(portString);
+      } else {
+        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT);
+      }
+    }
+    // Binary mode
+    else {
+      workerKeepAliveTime =
+          hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_WORKER_KEEPALIVE_TIME, TimeUnit.SECONDS);
+      portString = System.getenv("HIVE_SERVER2_THRIFT_PORT");
+      if (portString != null) {
+        portNum = Integer.valueOf(portString);
+      } else {
+        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT);
+      }
+    }
+    if (hiveHost != null && !hiveHost.isEmpty()) {
+      serverAddress = new InetSocketAddress(hiveHost, portNum);
+    } else {
+      serverAddress = new InetSocketAddress(portNum);
+    }
+    minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
+    maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
     super.init(hiveConf);
   }
 
@@ -105,6 +145,14 @@ public abstract class ThriftCLIService e
     super.stop();
   }
 
+  public int getPortNumber() {
+    return portNum;
+  }
+
+  public InetSocketAddress getServerAddress() {
+    return serverAddress;
+  }
+
   @Override
   public TGetDelegationTokenResp GetDelegationToken(TGetDelegationTokenReq req)
       throws TException {
@@ -308,6 +356,24 @@ public abstract class ThriftCLIService e
     } catch (Exception e) {
       LOG.warn("Error closing session: ", e);
       resp.setStatus(HiveSQLException.toTStatus(e));
+    } finally {
+      if (!(isEmbedded) && (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY))
+          && (!hiveServer2.isRegisteredWithZooKeeper())) {
+        // Asynchronously shutdown this instance of HiveServer2,
+        // if there are no active client sessions
+        if (cliService.getSessionManager().getOpenSessionCount() == 0) {
+          LOG.info("This instance of HiveServer2 has been removed from the list of server "
+              + "instances available for dynamic service discovery. "
+              + "The last client session has ended - will shutdown now.");
+          Thread shutdownThread = new Thread() {
+            @Override
+            public void run() {
+              hiveServer2.stop();
+            }
+          };
+          shutdownThread.start();
+        }
+      }
     }
     return resp;
   }
@@ -591,5 +657,9 @@ public abstract class ThriftCLIService e
         .equalsIgnoreCase(HiveAuthFactory.AuthTypes.KERBEROS.toString());
   }
 
+  public void setHiveServer2(HiveServer2 hiveServer2) {
+    this.hiveServer2 = hiveServer2;
+  }
+
 }
 

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java Wed Sep 17 05:28:43 2014
@@ -48,100 +48,94 @@ import org.eclipse.jetty.util.thread.Exe
 public class ThriftHttpCLIService extends ThriftCLIService {
 
   public ThriftHttpCLIService(CLIService cliService) {
-    super(cliService, "ThriftHttpCLIService");
+    super(cliService, ThriftHttpCLIService.class.getSimpleName());
   }
 
+  /**
+   * Configure Jetty to serve http requests. Example of a client connection URL:
+   * http://localhost:10000/servlets/thrifths2/ A gateway may cause actual target URL to differ,
+   * e.g. http://gateway:port/hive2/servlets/thrifths2/
+   */
   @Override
   public void run() {
     try {
-      // Configure Jetty to serve http requests
-      // Example of a client connection URL: http://localhost:10000/servlets/thrifths2/
-      // a gateway may cause actual target URL to differ, e.g. http://gateway:port/hive2/servlets/thrifths2/
-
+      // Verify config validity
       verifyHttpConfiguration(hiveConf);
 
-      String portString = System.getenv("HIVE_SERVER2_THRIFT_HTTP_PORT");
-      if (portString != null) {
-        portNum = Integer.valueOf(portString);
-      } else {
-        portNum = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT);
-      }
-
-      minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MIN_WORKER_THREADS);
-      maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_WORKER_THREADS);
-      workerKeepAliveTime = hiveConf.getTimeVar(
-          ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME, TimeUnit.SECONDS);
-
-      String httpPath =  getHttpPath(hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH));
-
+      // HTTP Server
       httpServer = new org.eclipse.jetty.server.Server();
+
+      // Server thread pool
       String threadPoolName = "HiveServer2-HttpHandler-Pool";
       ExecutorService executorService = new ThreadPoolExecutor(minWorkerThreads, maxWorkerThreads,
           workerKeepAliveTime, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(),
           new ThreadFactoryWithGarbageCleanup(threadPoolName));
-
       ExecutorThreadPool threadPool = new ExecutorThreadPool(executorService);
       httpServer.setThreadPool(threadPool);
 
-      SelectChannelConnector connector = new SelectChannelConnector();;
+      // Connector configs
+      SelectChannelConnector connector = new SelectChannelConnector();
       boolean useSsl = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL);
       String schemeName = useSsl ? "https" : "http";
-      String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
-      // Set during the init phase of HiveServer2 if auth mode is kerberos
-      // UGI for the hive/_HOST (kerberos) principal
-      UserGroupInformation serviceUGI = cliService.getServiceUGI();
-      // UGI for the http/_HOST (SPNego) principal
-      UserGroupInformation httpUGI = cliService.getHttpUGI();
-
+      // Change connector if SSL is used
       if (useSsl) {
         String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
         String keyStorePassword = ShimLoader.getHadoopShims().getPassword(hiveConf,
             HiveConf.ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname);
         if (keyStorePath.isEmpty()) {
-          throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname +
-              " Not configured for SSL connection");
+          throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname
+              + " Not configured for SSL connection");
         }
         SslContextFactory sslContextFactory = new SslContextFactory();
         sslContextFactory.setKeyStorePath(keyStorePath);
         sslContextFactory.setKeyStorePassword(keyStorePassword);
         connector = new SslSelectChannelConnector(sslContextFactory);
       }
-      
       connector.setPort(portNum);
       // Linux:yes, Windows:no
       connector.setReuseAddress(!Shell.WINDOWS);
-      
-      int maxIdleTime = (int) hiveConf.getTimeVar(
-          ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME, TimeUnit.MILLISECONDS);
+      int maxIdleTime = (int) hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_MAX_IDLE_TIME,
+          TimeUnit.MILLISECONDS);
       connector.setMaxIdleTime(maxIdleTime);
-      
+
       httpServer.addConnector(connector);
 
+      // Thrift configs
       hiveAuthFactory = new HiveAuthFactory(hiveConf);
       TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this);
       TProcessor processor = processorFactory.getProcessor(null);
-
       TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
+      // Set during the init phase of HiveServer2 if auth mode is kerberos
+      // UGI for the hive/_HOST (kerberos) principal
+      UserGroupInformation serviceUGI = cliService.getServiceUGI();
+      // UGI for the http/_HOST (SPNego) principal
+      UserGroupInformation httpUGI = cliService.getHttpUGI();
+      String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION);
+      TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, authType,
+          serviceUGI, httpUGI);
 
-      TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory,
-          authType, serviceUGI, httpUGI);
-
+      // Context handler
       final ServletContextHandler context = new ServletContextHandler(
           ServletContextHandler.SESSIONS);
       context.setContextPath("/");
-
+      String httpPath = getHttpPath(hiveConf
+          .getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PATH));
       httpServer.setHandler(context);
       context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
 
       // TODO: check defaults: maxTimeout, keepalive, maxBodySize, bodyRecieveDuration, etc.
+      // Finally, start the server
       httpServer.start();
-      String msg = "Started ThriftHttpCLIService in " + schemeName + " mode on port " + portNum +
-          " path=" + httpPath +
-          " with " + minWorkerThreads + ".." + maxWorkerThreads + " worker threads";
+      String msg = "Started " + ThriftHttpCLIService.class.getSimpleName() + " in " + schemeName
+          + " mode on port " + portNum + " path=" + httpPath + " with " + minWorkerThreads + "..."
+          + maxWorkerThreads + " worker threads";
       LOG.info(msg);
       httpServer.join();
     } catch (Throwable t) {
-      LOG.error("Error: ", t);
+      LOG.fatal(
+          "Error starting HiveServer2: could not start "
+              + ThriftHttpCLIService.class.getSimpleName(), t);
+      System.exit(-1);
     }
   }
 
@@ -191,7 +185,8 @@ public class ThriftHttpCLIService extend
       // NONE in case of thrift mode uses SASL
       LOG.warn(ConfVars.HIVE_SERVER2_AUTHENTICATION + " setting to " +
           authType + ". SASL is not supported with http transport mode," +
-          " so using equivalent of " + AuthTypes.NOSASL);
 +          " so using equivalent of "
 +          + AuthTypes.NOSASL);
     }
   }
 

Modified: hive/branches/cbo/service/src/java/org/apache/hive/service/server/HiveServer2.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/java/org/apache/hive/service/server/HiveServer2.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/java/org/apache/hive/service/server/HiveServer2.java (original)
+++ hive/branches/cbo/service/src/java/org/apache/hive/service/server/HiveServer2.java Wed Sep 17 05:28:43 2014
@@ -18,6 +18,8 @@
 
 package org.apache.hive.service.server;
 
+import java.nio.charset.Charset;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.LogUtils;
@@ -25,12 +27,21 @@ import org.apache.hadoop.hive.common.Log
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
+import org.apache.hadoop.hive.ql.util.ZooKeeperHiveHelper;
 import org.apache.hive.common.util.HiveStringUtils;
+import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.service.CompositeService;
+import org.apache.hive.service.ServiceException;
 import org.apache.hive.service.cli.CLIService;
 import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.hive.service.cli.thrift.ThriftHttpCLIService;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.WatchedEvent;
+import org.apache.zookeeper.Watcher;
+import org.apache.zookeeper.ZooDefs.Ids;
+import org.apache.zookeeper.ZooKeeper;
 
 /**
  * HiveServer2.
@@ -41,9 +52,12 @@ public class HiveServer2 extends Composi
 
   private CLIService cliService;
   private ThriftCLIService thriftCLIService;
+  private String znodePath;
+  private ZooKeeper zooKeeperClient;
+  private boolean registeredWithZooKeeper = false;
 
   public HiveServer2() {
-    super("HiveServer2");
+    super(HiveServer2.class.getSimpleName());
     HiveConf.setLoadHiveServer2Config(true);
   }
 
@@ -52,20 +66,129 @@ public class HiveServer2 extends Composi
   public synchronized void init(HiveConf hiveConf) {
     cliService = new CLIService();
     addService(cliService);
+    if (isHTTPTransportMode(hiveConf)) {
+      thriftCLIService = new ThriftHttpCLIService(cliService);
+    } else {
+      thriftCLIService = new ThriftBinaryCLIService(cliService);
+    }
+    addService(thriftCLIService);
+    thriftCLIService.setHiveServer2(this);
+    super.init(hiveConf);
 
+    // Add a shutdown hook for catching SIGTERM & SIGINT
+    final HiveServer2 hiveServer2 = this;
+    Runtime.getRuntime().addShutdownHook(new Thread() {
+      @Override
+      public void run() {
+        hiveServer2.stop();
+      }
+    });
+  }
+
+  public static boolean isHTTPTransportMode(HiveConf hiveConf) {
     String transportMode = System.getenv("HIVE_SERVER2_TRANSPORT_MODE");
-    if(transportMode == null) {
+    if (transportMode == null) {
       transportMode = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE);
     }
-    if(transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
-      thriftCLIService = new ThriftHttpCLIService(cliService);
+    if (transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
+      return true;
     }
-    else {
-      thriftCLIService = new ThriftBinaryCLIService(cliService);
+    return false;
+  }
+
+  /**
+   * Adds a server instance to ZooKeeper as a znode.
+   *
+   * @param hiveConf
+   * @throws Exception
+   */
+  private void addServerInstanceToZooKeeper(HiveConf hiveConf) throws Exception {
+    int zooKeeperSessionTimeout =
+        hiveConf.getIntVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_SESSION_TIMEOUT);
+    String zooKeeperEnsemble = ZooKeeperHiveHelper.getQuorumServers(hiveConf);
+    String rootNamespace = hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE);
+    String instanceURI = getServerInstanceURI(hiveConf);
+    byte[] znodeDataUTF8 = instanceURI.getBytes(Charset.forName("UTF-8"));
+    zooKeeperClient =
+        new ZooKeeper(zooKeeperEnsemble, zooKeeperSessionTimeout,
+            new ZooKeeperHiveHelper.DummyWatcher());
+
+    // Create the parent znodes recursively; ignore if the parent already exists
+    try {
+      ZooKeeperHiveHelper.createPathRecursively(zooKeeperClient, rootNamespace,
+          Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
+      LOG.info("Created the root name space: " + rootNamespace + " on ZooKeeper for HiveServer2");
+    } catch (KeeperException e) {
+      if (e.code() != KeeperException.Code.NODEEXISTS) {
+        LOG.fatal("Unable to create HiveServer2 namespace: " + rootNamespace + " on ZooKeeper", e);
+        throw (e);
+      }
     }
+    // Create a znode under the rootNamespace parent for this instance of the server
+    // Znode name: server-host:port-versionInfo-sequence
+    try {
+      String znodePath =
+          ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + rootNamespace
+              + ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR + "server-" + instanceURI + "-"
+              + HiveVersionInfo.getVersion() + "-";
+      znodePath =
+          zooKeeperClient.create(znodePath, znodeDataUTF8, Ids.OPEN_ACL_UNSAFE,
+              CreateMode.EPHEMERAL_SEQUENTIAL);
+      setRegisteredWithZooKeeper(true);
+      // Set a watch on the znode
+      if (zooKeeperClient.exists(znodePath, new DeRegisterWatcher()) == null) {
+        // No node exists, throw exception
+        throw new Exception("Unable to create znode for this HiveServer2 instance on ZooKeeper.");
+      }
+      LOG.info("Created a znode on ZooKeeper for HiveServer2 uri: " + instanceURI);
+    } catch (KeeperException e) {
+      LOG.fatal("Unable to create a znode for this server instance", e);
+      throw new Exception(e);
+    }
+  }
 
-    addService(thriftCLIService);
-    super.init(hiveConf);
+  /**
+   * The watcher class which sets the de-register flag when the znode corresponding to this server
+   * instance is deleted. Additionally, it shuts down the server if there are no more active client
+   * sessions at the time of receiving a 'NodeDeleted' notification from ZooKeeper.
+   */
+  private class DeRegisterWatcher implements Watcher {
+    public void process(WatchedEvent event) {
+      if (event.getType().equals(Watcher.Event.EventType.NodeDeleted)) {
+        HiveServer2.this.setRegisteredWithZooKeeper(false);
+        // If there are no more active client sessions, stop the server
+        if (cliService.getSessionManager().getOpenSessionCount() == 0) {
+          LOG.warn("This instance of HiveServer2 has been removed from the list of server "
+              + "instances available for dynamic service discovery. "
+              + "The last client session has ended - will shutdown now.");
+          HiveServer2.this.stop();
+        }
+        LOG.warn("This HiveServer2 instance is now de-registered from ZooKeeper. "
 +            + "The server will be shut down after the last client session completes.");
+      }
+    }
+  }
+
+  private void removeServerInstanceFromZooKeeper() throws Exception {
+    setRegisteredWithZooKeeper(false);
+    zooKeeperClient.close();
+    LOG.info("Server instance removed from ZooKeeper.");
+  }
+
+  public boolean isRegisteredWithZooKeeper() {
+    return registeredWithZooKeeper;
+  }
+
+  private void setRegisteredWithZooKeeper(boolean registeredWithZooKeeper) {
+    this.registeredWithZooKeeper = registeredWithZooKeeper;
+  }
+
+  private String getServerInstanceURI(HiveConf hiveConf) throws Exception {
+    if ((thriftCLIService == null) || (thriftCLIService.getServerAddress() == null)) {
+      throw new Exception("Unable to get the server address; it hasn't been initialized yet.");
+    }
+    return thriftCLIService.getServerAddress().getHostName() + ":"
+        + thriftCLIService.getPortNumber();
   }
 
   @Override
@@ -75,23 +198,32 @@ public class HiveServer2 extends Composi
 
   @Override
   public synchronized void stop() {
-    super.stop();
-    // there should already be an instance of the session pool manager.
-    // if not, ignoring is fine while stopping the hive server.
+    LOG.info("Shutting down HiveServer2");
     HiveConf hiveConf = this.getHiveConf();
+    super.stop();
+    // Remove this server instance from ZooKeeper if dynamic service discovery is set
+    if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY)) {
+      try {
+        removeServerInstanceFromZooKeeper();
+      } catch (Exception e) {
+        LOG.error("Error removing znode for this HiveServer2 instance from ZooKeeper.", e);
+      }
+    }
+    // There should already be an instance of the session pool manager.
+    // If not, ignoring is fine while stopping HiveServer2.
     if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_TEZ_INITIALIZE_DEFAULT_SESSIONS)) {
       try {
         TezSessionPoolManager.getInstance().stop();
       } catch (Exception e) {
-        LOG.error("Tez session pool manager stop had an error during stop of hive server");
-        e.printStackTrace();
+        LOG.error("Tez session pool manager stop had an error during stop of HiveServer2. "
+            + "Shutting down HiveServer2 anyway.", e);
       }
     }
   }
 
   private static void startHiveServer2() throws Throwable {
     long attempts = 0, maxAttempts = 1;
-    while(true) {
+    while (true) {
       HiveConf hiveConf = new HiveConf();
       maxAttempts = hiveConf.getLongVar(HiveConf.ConfVars.HIVE_SERVER2_MAX_START_ATTEMPTS);
       HiveServer2 server = null;
@@ -99,6 +231,11 @@ public class HiveServer2 extends Composi
         server = new HiveServer2();
         server.init(hiveConf);
         server.start();
+        // If we're supporting dynamic service discovery, we'll add the service uri for this
+        // HiveServer2 instance to Zookeeper as a znode.
+        if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY)) {
+          server.addServerInstanceToZooKeeper(hiveConf);
+        }
         if (hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_TEZ_INITIALIZE_DEFAULT_SESSIONS)) {
           TezSessionPoolManager sessionPool = TezSessionPoolManager.getInstance();
           sessionPool.setupPool(hiveConf);
@@ -106,19 +243,19 @@ public class HiveServer2 extends Composi
         }
         break;
       } catch (Throwable throwable) {
-        if(++attempts >= maxAttempts) {
+        if (++attempts >= maxAttempts) {
           throw new Error("Max start attempts " + maxAttempts + " exhausted", throwable);
         } else {
-          LOG.warn("Error starting HiveServer2 on attempt " + attempts +
-            ", will retry in 60 seconds", throwable);
+          LOG.warn("Error starting HiveServer2 on attempt " + attempts
+              + ", will retry in 60 seconds", throwable);
           try {
             if (server != null) {
               server.stop();
               server = null;
             }
           } catch (Exception e) {
-            LOG.info("Exception caught when calling stop of HiveServer2 before" +
-              " retrying start", e);
+            LOG.info(
+                "Exception caught when calling stop of HiveServer2 before" + " retrying start", e);
           }
           try {
             Thread.sleep(60L * 1000L);
@@ -139,14 +276,15 @@ public class HiveServer2 extends Composi
         System.exit(-1);
       }
 
-      //NOTE: It is critical to do this here so that log4j is reinitialized
+      // NOTE: It is critical to do this here so that log4j is reinitialized
       // before any of the other core hive classes are loaded
       String initLog4jMessage = LogUtils.initHiveLog4j();
       LOG.debug(initLog4jMessage);
 
       HiveStringUtils.startupShutdownMessage(HiveServer2.class, args, LOG);
-      //log debug message from "oproc" after log4j initialize properly
+      // log debug message from "oproc" after log4j initialize properly
       LOG.debug(oproc.getDebugMessage().toString());
+
       startHiveServer2();
     } catch (LogInitializationException e) {
       LOG.error("Error initializing log: " + e.getMessage(), e);
@@ -156,6 +294,5 @@ public class HiveServer2 extends Composi
       System.exit(-1);
     }
   }
-
 }
 

Modified: hive/branches/cbo/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java?rev=1625461&r1=1625460&r2=1625461&view=diff
==============================================================================
--- hive/branches/cbo/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java (original)
+++ hive/branches/cbo/service/src/test/org/apache/hive/service/cli/session/TestSessionGlobalInitFile.java Wed Sep 17 05:28:43 2014
@@ -27,7 +27,11 @@ import junit.framework.TestCase;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.service.cli.*;
+import org.apache.hive.service.cli.CLIService;
+import org.apache.hive.service.cli.ICLIService;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
 import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
 import org.junit.After;
@@ -83,7 +87,7 @@ public class TestSessionGlobalInitFile e
 
     // set up service and client
     HiveConf hiveConf = new HiveConf();
-    hiveConf.setVar(HiveConf.ConfVars.HIVE_GLOBAL_INIT_FILE_LOCATION,
+    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION,
         initFile.getParentFile().getAbsolutePath());
     service = new FakeEmbeddedThriftBinaryCLIService(hiveConf);
     service.init(new HiveConf());