You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by se...@apache.org on 2016/09/12 20:24:46 UTC
[14/31] hive git commit: HIVE-14039: HiveServer2: Make the usage of
server with JDBC thrift serde enabled,
backward compatible for older clients (Ziyang Zhao reviewed by Vaibhav
Gumashta)
HIVE-14039: HiveServer2: Make the usage of server with JDBC thrift serde enabled, backward compatible for older clients (Ziyang Zhao reviewed by Vaibhav Gumashta)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d2e294cf
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d2e294cf
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d2e294cf
Branch: refs/heads/hive-14535
Commit: d2e294cf592aa8f9393537b64562fe11b7355306
Parents: 5edf7c8
Author: Vaibhav Gumashta <vg...@hortonworks.com>
Authored: Fri Sep 9 02:48:38 2016 -0700
Committer: Vaibhav Gumashta <vg...@hortonworks.com>
Committed: Fri Sep 9 02:48:38 2016 -0700
----------------------------------------------------------------------
.../apache/hive/jdbc/TestJdbcWithMiniHS2.java | 44 ++++++++++++++++++++
.../hadoop/hive/ql/parse/SemanticAnalyzer.java | 3 +-
.../hadoop/hive/ql/parse/TaskCompiler.java | 2 +-
.../hadoop/hive/ql/session/SessionState.java | 13 ++++++
.../service/cli/session/HiveSessionImpl.java | 9 +++-
5 files changed, 67 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/d2e294cf/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
index 0249566..e8e57ef 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
@@ -512,6 +512,11 @@ public class TestJdbcWithMiniHS2 {
conf.setInt("hive.server2.thrift.resultset.max.fetch.size", 1000);
}
+ private void unsetSerializeInTasksInConf(HiveConf conf) {
+ conf.setBoolean("hive.server2.thrift.resultset.serialize.in.tasks", false);
+ conf.unset("hive.server2.thrift.resultset.max.fetch.size");
+ }
+
@Test
public void testMetadataQueriesWithSerializeThriftInTasks() throws Exception {
//stop HiveServer2
@@ -674,6 +679,45 @@ public class TestJdbcWithMiniHS2 {
stmt.close();
}
+ @Test
+ public void testEnableThriftSerializeInTasks() throws Exception {
+ //stop HiveServer2
+ if (miniHS2.isStarted()) {
+ miniHS2.stop();
+ }
+
+ HiveConf conf = new HiveConf();
+ String userName;
+ setSerializeInTasksInConf(conf);
+ miniHS2 = new MiniHS2(conf);
+ Map<String, String> confOverlay = new HashMap<String, String>();
+ miniHS2.start(confOverlay);
+
+ userName = System.getProperty("user.name");
+ hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password");
+ Statement stmt = hs2Conn.createStatement();
+ stmt.execute("drop table if exists testThriftSerializeShow1");
+ stmt.execute("drop table if exists testThriftSerializeShow2");
+ stmt.execute("create table testThriftSerializeShow1 (a int)");
+ stmt.execute("create table testThriftSerializeShow2 (b int)");
+ stmt.execute("insert into testThriftSerializeShow1 values (1)");
+ stmt.execute("insert into testThriftSerializeShow2 values (2)");
+ ResultSet rs = stmt.executeQuery("select * from testThriftSerializeShow1 inner join testThriftSerializeShow2 where testThriftSerializeShow1.a=testThriftSerializeShow2.b");
+ assertTrue(!rs.next());
+
+ unsetSerializeInTasksInConf(conf);
+ rs = stmt.executeQuery("select * from testThriftSerializeShow1 inner join testThriftSerializeShow2 where testThriftSerializeShow1.a=testThriftSerializeShow2.b");
+ assertTrue(!rs.next());
+
+ setSerializeInTasksInConf(conf);
+ rs = stmt.executeQuery("select * from testThriftSerializeShow1 inner join testThriftSerializeShow2 where testThriftSerializeShow1.a=testThriftSerializeShow2.b");
+ assertTrue(!rs.next());
+
+ stmt.execute("drop table testThriftSerializeShow1");
+ stmt.execute("drop table testThriftSerializeShow2");
+ stmt.close();
+ }
+
/**
* Tests the creation of the 3 scratch dirs: hdfs, local, downloaded resources (which is also local).
* 1. Test with doAs=false: open a new JDBC session and verify the presence of directories/permissions
http://git-wip-us.apache.org/repos/asf/hive/blob/d2e294cf/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 943d9d7..489e70f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -6874,8 +6874,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
if (tblDesc == null) {
if (qb.getIsQuery()) {
String fileFormat;
- if (SessionState.get().isHiveServerQuery() &&
- conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS)) {
+ if (SessionState.get().getIsUsingThriftJDBCBinarySerDe()) {
fileFormat = "SequenceFile";
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT, fileFormat);
table_desc=
http://git-wip-us.apache.org/repos/asf/hive/blob/d2e294cf/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index fb5ca57..fb2b992 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -159,7 +159,7 @@ public abstract class TaskCompiler {
TableDesc resultTab = pCtx.getFetchTableDesc();
if (resultTab == null) {
resFileFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT);
- if (SessionState.get().isHiveServerQuery() && (conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS))
+ if (SessionState.get().getIsUsingThriftJDBCBinarySerDe()
&& (resFileFormat.equalsIgnoreCase("SequenceFile"))) {
resultTab =
PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, resFileFormat,
http://git-wip-us.apache.org/repos/asf/hive/blob/d2e294cf/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 408c92e..d23a51f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -139,6 +139,11 @@ public class SessionState {
*/
private boolean isHiveServerQuery = false;
+ /**
+ * The flag to indicate whether the session is using the Thrift JDBC binary serde or not.
+ */
+ private boolean isUsingThriftJDBCBinarySerDe = false;
+
/*
* HiveHistory Object
*/
@@ -341,6 +346,14 @@ public class SessionState {
this.isVerbose = isVerbose;
}
+ public void setIsUsingThriftJDBCBinarySerDe(boolean isUsingThriftJDBCBinarySerDe) {
+ this.isUsingThriftJDBCBinarySerDe = isUsingThriftJDBCBinarySerDe;
+ }
+
+ public boolean getIsUsingThriftJDBCBinarySerDe() {
+ return isUsingThriftJDBCBinarySerDe;
+ }
+
public void setIsHiveServerQuery(boolean isHiveServerQuery) {
this.isHiveServerQuery = isHiveServerQuery;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/d2e294cf/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 72ad86c..8051f47 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -166,6 +166,7 @@ public class HiveSessionImpl implements HiveSession {
sessionState.setUserIpAddress(ipAddress);
sessionState.setIsHiveServerQuery(true);
sessionState.setForwardedAddresses(SessionManager.getForwardedAddresses());
+ sessionState.setIsUsingThriftJDBCBinarySerDe(updateIsUsingThriftJDBCBinarySerDe());
SessionState.start(sessionState);
try {
sessionState.loadAuxJars();
@@ -189,7 +190,7 @@ public class HiveSessionImpl implements HiveSession {
lastIdleTime = lastAccessTime;
}
- /**
+/**
* It is used for processing hiverc file from HiveServer2 side.
*/
private class GlobalHivercFileProcessor extends HiveFileProcessor {
@@ -268,6 +269,11 @@ public class HiveSessionImpl implements HiveSession {
}
}
+ private boolean updateIsUsingThriftJDBCBinarySerDe() {
+ return (8 <= getProtocolVersion().getValue())
+ && sessionConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS);
+ }
+
@Override
public void setOperationLogSessionDir(File operationLogRootDir) {
if (!operationLogRootDir.exists()) {
@@ -355,6 +361,7 @@ public class HiveSessionImpl implements HiveSession {
// stored in the thread local for the handler thread.
SessionState.setCurrentSessionState(sessionState);
sessionState.setForwardedAddresses(SessionManager.getForwardedAddresses());
+ sessionState.setIsUsingThriftJDBCBinarySerDe(updateIsUsingThriftJDBCBinarySerDe());
if (userAccess) {
lastAccessTime = System.currentTimeMillis();
}