You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by br...@apache.org on 2013/11/07 21:24:34 UTC
svn commit: r1539799 - in /hive/trunk:
common/src/java/org/apache/hadoop/hive/conf/ data/files/
itests/hive-unit/src/test/java/org/apache/hive/jdbc/
itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/
jdbc/src/java/org/apache/hive/jdbc/ metast...
Author: brock
Date: Thu Nov 7 20:24:33 2013
New Revision: 1539799
URL: http://svn.apache.org/r1539799
Log:
HIVE-5351 - Secure-Socket-Layer (SSL) support for HiveServer2 (Prasad Mujumdar via Brock Noland)
Added:
hive/trunk/data/files/keystore.jks (with props)
hive/trunk/data/files/truststore.jks (with props)
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1539799&r1=1539798&r2=1539799&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Nov 7 20:24:33 2013
@@ -777,6 +777,9 @@ public class HiveConf extends Configurat
HIVE_SERVER2_TABLE_TYPE_MAPPING("hive.server2.table.type.mapping", "CLASSIC",
new StringsValidator("CLASSIC", "HIVE")),
HIVE_SERVER2_SESSION_HOOK("hive.server2.session.hook", ""),
+ HIVE_SERVER2_USE_SSL("hive.server2.use.SSL", false),
+ HIVE_SERVER2_SSL_KEYSTORE_PATH("hive.server2.keystore.path", ""),
+ HIVE_SERVER2_SSL_KEYSTORE_PASSWORD("hive.server2.keystore.password", ""),
HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,delete,compile"),
Added: hive/trunk/data/files/keystore.jks
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/keystore.jks?rev=1539799&view=auto
==============================================================================
Binary file - no diff available.
Propchange: hive/trunk/data/files/keystore.jks
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: hive/trunk/data/files/truststore.jks
URL: http://svn.apache.org/viewvc/hive/trunk/data/files/truststore.jks?rev=1539799&view=auto
==============================================================================
Binary file - no diff available.
Propchange: hive/trunk/data/files/truststore.jks
------------------------------------------------------------------------------
svn:mime-type = application/octet-stream
Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java?rev=1539799&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java Thu Nov 7 20:24:33 2013
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+ /**
+  * End-to-end JDBC test that runs statements against an embedded
+  * HiveServer2 instance (MiniHS2) through the Hive JDBC driver.
+  */
+ public class TestJdbcWithMiniHS2 {
+ // single MiniHS2 instance shared by all tests in this class
+ private static MiniHS2 miniHS2 = null;
+ // location of the kv1.txt sample data file loaded into the test table
+ private static Path dataFilePath;
+
+ private Connection hs2Conn = null;
+
+ @BeforeClass
+ public static void beforeTest() throws Exception {
+ // register the JDBC driver and resolve the test data directory
+ Class.forName(MiniHS2.getJdbcDriverName());
+ HiveConf conf = new HiveConf();
+ miniHS2 = new MiniHS2(conf);
+ // normalize Windows-style paths (backslashes, "c:" drive prefix)
+ String dataFileDir = conf.get("test.data.files").replace('\\', '/')
+ .replace("c:", "");
+ dataFilePath = new Path(dataFileDir, "kv1.txt");
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ // fresh server + connection per test
+ miniHS2.start();
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar");
+ hs2Conn.createStatement().execute("set hive.support.concurrency = false");
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ hs2Conn.close();
+ miniHS2.stop();
+ }
+
+ @Test
+ public void testConnection() throws Exception {
+ String tableName = "testTab1";
+ Statement stmt = hs2Conn.createStatement();
+
+ // create table
+ stmt.execute("DROP TABLE IF EXISTS " + tableName);
+ stmt.execute("CREATE TABLE " + tableName
+ + " (under_col INT COMMENT 'the under column', value STRING) COMMENT ' test table'");
+
+ // load data
+ stmt.execute("load data local inpath '"
+ + dataFilePath.toString() + "' into table " + tableName);
+
+ // expect the first returned row's value column to be "val_238"
+ // (assumes kv1.txt starts with key 238 -- TODO confirm against the data file)
+ ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
+ assertTrue(res.next());
+ assertEquals("val_238", res.getString(2));
+ res.close();
+ stmt.close();
+ }
+}
Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java?rev=1539799&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java Thu Nov 7 20:24:33 2013
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+//import org.apache.hive.service.miniHS2.MiniHS2;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestSSL {
+ private static final String KEY_STORE_NAME = "keystore.jks";
+ private static final String TRUST_STORE_NAME = "truststore.jks";
+ private static final String KEY_STORE_PASSWORD = "HiveJdbc";
+ private static final String JAVA_TRUST_STORE_PROP = "javax.net.ssl.trustStore";
+ private static final String JAVA_TRUST_STORE_PASS_PROP = "javax.net.ssl.trustStorePassword";
+
+ private MiniHS2 miniHS2 = null;
+ private static HiveConf conf = new HiveConf();
+ private Connection hs2Conn = null;
+ private String dataFileDir = conf.get("test.data.files");
+
+ @BeforeClass
+ public static void beforeTest() throws Exception {
+ Class.forName(MiniHS2.getJdbcDriverName());
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ DriverManager.setLoginTimeout(0);
+ if (!System.getProperty("test.data.files", "").isEmpty()) {
+ dataFileDir = System.getProperty("test.data.files");
+ }
+ dataFileDir = dataFileDir.replace('\\', '/').replace("c:", "");
+ miniHS2 = new MiniHS2(conf);
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ if (hs2Conn != null) {
+ hs2Conn.close();
+ }
+ if (miniHS2 != null && miniHS2.isStarted()) {
+ miniHS2.stop();
+ }
+ System.clearProperty(JAVA_TRUST_STORE_PROP);
+ System.clearProperty(JAVA_TRUST_STORE_PASS_PROP);
+ }
+
+ /***
+ * Test SSL client with non-SSL server fails
+ * @throws Exception
+ */
+ @Test
+ public void testInvalidConfig() throws Exception {
+ miniHS2.start();
+ DriverManager.setLoginTimeout(4);
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" +
+ dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
+ KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+ fail("SSL connection should fail with NON-SSL server");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+
+ System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME );
+ System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD);
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true",
+ System.getProperty("user.name"), "bar");
+ fail("SSL connection should fail with NON-SSL server");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+
+ }
+
+ /***
+ * Test non-SSL client with SSL server fails
+ * @throws Exception
+ */
+ @Test
+ public void testConnectionMismatch() throws Exception {
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true");
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname, "");
+ miniHS2.start();
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar");
+ fail("NON SSL connection should fail with SSL server");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+
+ try {
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL()+ ";ssl=false",
+ System.getProperty("user.name"), "bar");
+ fail("NON SSL connection should fail with SSL server");
+ } catch (SQLException e) {
+ // expected error
+ assertEquals("08S01", e.getSQLState().trim());
+ }
+
+ }
+
+ /***
+ * Test SSL client connection to SSL server
+ * @throws Exception
+ */
+ @Test
+ public void testSSLConnectionWithURL() throws Exception {
+ // Start HS2 with SSL
+ startSslSever();
+
+ // make SSL connection
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" +
+ dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
+ KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+
+ hs2Conn.close();
+ }
+
+ /***
+ * Test SSL client connection to SSL server
+ * @throws Exception
+ */
+ @Test
+ public void testSSLConnectionWithProperty() throws Exception {
+ // Start HS2 with SSL
+ startSslSever();
+
+ System.setProperty(JAVA_TRUST_STORE_PROP, dataFileDir + File.separator + TRUST_STORE_NAME );
+ System.setProperty(JAVA_TRUST_STORE_PASS_PROP, KEY_STORE_PASSWORD);
+ // make SSL connection
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true",
+ System.getProperty("user.name"), "bar");
+
+ hs2Conn.close();
+ }
+
+ /**
+ * Start HS2 in SSL mode, open a SSL connection and fetch data
+ * @throws Exception
+ */
+ @Test
+ public void testSSLFetch() throws Exception {
+ // Start HS2 with SSL
+ startSslSever();
+
+ // make SSL connection
+ hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL() + ";ssl=true;sslTrustStore=" +
+ dataFileDir + File.separator + TRUST_STORE_NAME + ";trustStorePassword=" +
+ KEY_STORE_PASSWORD, System.getProperty("user.name"), "bar");
+
+ String tableName = "sslTab";
+ Statement stmt = hs2Conn.createStatement();
+ Path dataFilePath = new Path(dataFileDir, "kv1.txt");
+
+ stmt.execute("set hive.support.concurrency = false");
+
+ stmt.execute("drop table if exists " + tableName);
+ stmt.execute("create table " + tableName
+ + " (under_col int comment 'the under column', value string)");
+
+ // load data
+ stmt.execute("load data local inpath '"
+ + dataFilePath.toString() + "' into table " + tableName);
+
+ ResultSet res = stmt.executeQuery("SELECT * FROM " + tableName);
+ int rowCount = 0;
+ while (res.next()) {
+ ++rowCount;
+ assertEquals("val_" + res.getInt(1), res.getString(2));
+ }
+
+ // read result over SSL
+ assertEquals(500, rowCount);
+ }
+
+ private void startSslSever () throws Exception {
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_USE_SSL.varname, "true");
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname,
+ dataFileDir + File.separator + KEY_STORE_NAME);
+ miniHS2.setConfProperty(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname,
+ KEY_STORE_PASSWORD);
+ miniHS2.start();
+ }
+
+}
Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java?rev=1539799&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/AbstarctHiveService.java Thu Nov 7 20:24:33 2013
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc.miniHS2;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+
+/***
+ * Base class for a test-managed Hive service: holds the HiveConf,
+ * host/port, and started-state bookkeeping shared by concrete services
+ * such as MiniHS2.
+ *
+ * NOTE: the (misspelled) name "AbstarctHiveService" is kept for
+ * compatibility with the file name and existing subclasses.
+ */
+public abstract class AbstarctHiveService {
+ private HiveConf hiveConf = null;
+ private String hostname;
+ private int port;
+ private boolean startedHiveService = false;
+
+ public AbstarctHiveService(HiveConf hiveConf, String hostname, int port) {
+ this.hiveConf = hiveConf;
+ this.hostname = hostname;
+ this.port = port;
+ }
+
+ /**
+ * Get Hive conf
+ * @return the configuration backing this service
+ */
+ public HiveConf getHiveConf() {
+ return hiveConf;
+ }
+
+ /**
+ * Get config property
+ * @param propertyKey
+ * @return the configured value, or null if unset
+ */
+ public String getConfProperty(String propertyKey) {
+ return hiveConf.get(propertyKey);
+ }
+
+ /**
+ * Set config property in both the JVM system properties and the HiveConf
+ * @param propertyKey
+ * @param propertyValue
+ */
+ public void setConfProperty(String propertyKey, String propertyValue) {
+ System.setProperty(propertyKey, propertyValue);
+ hiveConf.set(propertyKey, propertyValue);
+ }
+
+ /**
+ * Retrieve warehouse directory
+ * @return the metastore warehouse path
+ */
+ public Path getWareHouseDir() {
+ return new Path(hiveConf.getVar(ConfVars.METASTOREWAREHOUSE));
+ }
+
+ /**
+ * Set the warehouse directory; only legal before the service starts.
+ * @param wareHouseURI
+ */
+ public void setWareHouseDir(String wareHouseURI) {
+ verifyNotStarted();
+ System.setProperty(ConfVars.METASTOREWAREHOUSE.varname, wareHouseURI);
+ hiveConf.setVar(ConfVars.METASTOREWAREHOUSE, wareHouseURI);
+ }
+
+ /**
+ * Set service host
+ * @param hostName
+ */
+ public void setHost(String hostName) {
+ this.hostname = hostName;
+ }
+
+ // get service host
+ protected String getHost() {
+ return hostname;
+ }
+
+ /**
+ * Set service port #
+ * @param portNum
+ */
+ public void setPort(int portNum) {
+ this.port = portNum;
+ }
+
+ // get service port#
+ protected int getPort() {
+ return port;
+ }
+
+ public boolean isStarted() {
+ return startedHiveService;
+ }
+
+ protected void setStarted(boolean hiveServiceStatus) {
+ this.startedHiveService = hiveServiceStatus;
+ }
+
+ // fail fast when an operation requires a running service
+ protected void verifyStarted() {
+ if (!isStarted()) {
+ throw new IllegalStateException("HS2 is not running");
+ }
+ }
+
+ // fail fast when an operation requires the service to be stopped
+ protected void verifyNotStarted() {
+ if (isStarted()) {
+ // message fixed: was missing a space ("alreadyrunning")
+ throw new IllegalStateException("HS2 already running");
+ }
+ }
+
+}
Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java?rev=1539799&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java Thu Nov 7 20:24:33 2013
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc.miniHS2;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hive.service.Service;
+import org.apache.hive.service.cli.CLIServiceClient;
+import org.apache.hive.service.cli.SessionHandle;
+import org.apache.hive.service.cli.thrift.ThriftBinaryCLIService;
+import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
+import org.apache.hive.service.server.HiveServer2;
+
+import com.google.common.io.Files;
+
+/**
+ * Embedded HiveServer2 for tests: runs HS2 against a throwaway Derby
+ * metastore and warehouse under a temp directory, and waits for the
+ * Thrift service to accept sessions before reporting started.
+ */
+public class MiniHS2 extends AbstarctHiveService {
+ private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+ private HiveServer2 hiveServer2 = null;
+ // temp directory holding the warehouse and the per-instance Derby metastore
+ private final File baseDir;
+ // monotonically increasing suffix so concurrent instances get distinct metastores
+ private static final AtomicLong hs2Counter = new AtomicLong();
+
+ public MiniHS2(HiveConf hiveConf) throws IOException {
+ // bind to localhost on a free port found at construction time
+ super(hiveConf, "localhost", MetaStoreUtils.findFreePort());
+ baseDir = Files.createTempDir();
+ setWareHouseDir("file://" + baseDir.getPath() + File.separator + "warehouse");
+ String metaStoreURL = "jdbc:derby:" + baseDir.getAbsolutePath() + File.separator + "test_metastore-" +
+ hs2Counter.incrementAndGet() + ";create=true";
+
+ System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
+ hiveConf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, metaStoreURL);
+ hiveConf.setVar(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST, getHost());
+ hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_PORT, getPort());
+ // force the metastore to recreate the default DB for this fresh instance
+ HiveMetaStore.HMSHandler.resetDefaultDBFlag();
+ }
+
+ public void start() throws Exception {
+ hiveServer2 = new HiveServer2();
+ hiveServer2.init(getHiveConf());
+ hiveServer2.start();
+ // block until the Thrift service accepts a session
+ waitForStartup();
+ setStarted(true);
+ }
+
+ public void stop() {
+ verifyStarted();
+ hiveServer2.stop();
+ setStarted(false);
+ // best-effort cleanup of the temp warehouse/metastore directory
+ FileUtils.deleteQuietly(baseDir);
+ }
+
+ public CLIServiceClient getServiceClient() {
+ verifyStarted();
+ return getServiceClientInternal();
+ }
+
+ // variant without the started-state check, used during startup polling
+ public CLIServiceClient getServiceClientInternal() {
+ for (Service service : hiveServer2.getServices()) {
+ if (service instanceof ThriftBinaryCLIService) {
+ return new ThriftCLIServiceClient((ThriftBinaryCLIService)service);
+ }
+ }
+ throw new IllegalStateException("HS2 not running Thrift service");
+ }
+
+ public String getJdbcURL() {
+ return "jdbc:hive2://" + getHost() + ":" + getPort() + "/default";
+ }
+
+ public static String getJdbcDriverName() {
+ return driverName;
+ }
+
+ // Poll every 500ms until a session can be opened, or the timeout elapses.
+ private void waitForStartup() throws Exception {
+ int waitTime = 0;
+ // NOTE(review): 1000L * 1000000000L compared against a millisecond
+ // counter is ~31 years; looks like a units mix-up -- confirm the
+ // intended startup timeout.
+ long startupTimeout = 1000L * 1000000000L;
+ CLIServiceClient hs2Client = getServiceClientInternal();
+ SessionHandle sessionHandle = null;
+ do {
+ Thread.sleep(500L);
+ waitTime += 500L;
+ if (waitTime > startupTimeout) {
+ throw new TimeoutException("Couldn't access new HiveServer: " + getJdbcURL());
+ }
+ try {
+ sessionHandle = hs2Client.openSession("foo", "bar");
+ } catch (Exception e) {
+ // service not started yet
+ continue;
+ }
+ hs2Client.closeSession(sessionHandle);
+ break;
+ } while (true);
+ }
+
+}
Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java?rev=1539799&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/miniHS2/TestHiveServer2.java Thu Nov 7 20:24:33 2013
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc.miniHS2;
+
+import static org.junit.Assert.assertFalse;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.cli.CLIServiceClient;
+import org.apache.hive.service.cli.OperationHandle;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.SessionHandle;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Smoke test for MiniHS2 that talks to the Thrift CLI service client
+ * directly (no JDBC).
+ */
+public class TestHiveServer2 {
+
+ private static MiniHS2 miniHS2 = null;
+ // per-statement session configuration overlay
+ private Map<String, String> confOverlay;
+
+ @BeforeClass
+ public static void beforeTest() throws IOException {
+ miniHS2 = new MiniHS2(new HiveConf());
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ miniHS2.start();
+ confOverlay = new HashMap<String, String>();
+ }
+
+ @After
+ public void tearDown() {
+ miniHS2.stop();
+ }
+
+ @Test
+ public void testConnection() throws Exception {
+ String tabName = "testTab1";
+ CLIServiceClient serviceClient = miniHS2.getServiceClient();
+ SessionHandle sessHandle = serviceClient.openSession("foo", "bar");
+ // drop the table we are about to create (was hard-coded to "tab")
+ serviceClient.executeStatement(sessHandle, "DROP TABLE IF EXISTS " + tabName, confOverlay);
+ serviceClient.executeStatement(sessHandle, "CREATE TABLE " + tabName + " (id INT)", confOverlay);
+ OperationHandle opHandle = serviceClient.executeStatement(sessHandle, "SHOW TABLES", confOverlay);
+ RowSet rowSet = serviceClient.fetchResults(opHandle);
+ // at minimum the table we just created should be listed
+ assertFalse(rowSet.getSize() == 0);
+ }
+}
Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1539799&r1=1539798&r2=1539799&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Thu Nov 7 20:24:33 2013
@@ -24,6 +24,7 @@ import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
+import java.sql.DriverManager;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
@@ -46,6 +47,7 @@ import javax.security.sasl.SaslException
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.auth.KerberosSaslHelper;
import org.apache.hive.service.auth.PlainSaslHelper;
import org.apache.hive.service.auth.SaslQOP;
@@ -61,7 +63,6 @@ import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.THttpClient;
-import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
@@ -78,6 +79,10 @@ public class HiveConnection implements j
private static final String HIVE_AUTH_PASSWD = "password";
private static final String HIVE_ANONYMOUS_USER = "anonymous";
private static final String HIVE_ANONYMOUS_PASSWD = "anonymous";
+ private static final String HIVE_USE_SSL = "ssl";
+ private static final String HIVE_SSL_TRUST_STORE = "sslTrustStore";
+ private static final String HIVE_SSL_TRUST_STORE_PASSWORD = "trustStorePassword";
+
private final String jdbcURI;
private final String host;
private final int port;
@@ -91,8 +96,10 @@ public class HiveConnection implements j
private SQLWarning warningChain = null;
private TSessionHandle sessHandle = null;
private final List<TProtocolVersion> supportedProtocols = new LinkedList<TProtocolVersion>();
+ private int loginTimeout = 0;
public HiveConnection(String uri, Properties info) throws SQLException {
+ loginTimeout = DriverManager.getLoginTimeout();
jdbcURI = uri;
// parse the connection uri
Utils.JdbcConnectionParams connParams = Utils.parseURL(jdbcURI);
@@ -178,26 +185,26 @@ public class HiveConnection implements j
}
private TTransport createBinaryTransport() throws SQLException {
- transport = new TSocket(host, port);
- // handle secure connection if specified
- if (!sessConfMap.containsKey(HIVE_AUTH_TYPE)
- || !sessConfMap.get(HIVE_AUTH_TYPE).equals(HIVE_AUTH_SIMPLE)) {
- try {
+ try {
+ // handle secure connection if specified
+ if (!HIVE_AUTH_SIMPLE.equals(sessConfMap.get(HIVE_AUTH_TYPE))) {
// If Kerberos
if (sessConfMap.containsKey(HIVE_AUTH_PRINCIPAL)) {
Map<String, String> saslProps = new HashMap<String, String>();
SaslQOP saslQOP = SaslQOP.AUTH;
- if(sessConfMap.containsKey(HIVE_AUTH_QOP)) {
+ if (sessConfMap.containsKey(HIVE_AUTH_QOP)) {
try {
saslQOP = SaslQOP.fromString(sessConfMap.get(HIVE_AUTH_QOP));
} catch (IllegalArgumentException e) {
- throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(), "42000", e);
+ throw new SQLException("Invalid " + HIVE_AUTH_QOP + " parameter. " + e.getMessage(),
+ "42000", e);
}
}
saslProps.put(Sasl.QOP, saslQOP.toString());
saslProps.put(Sasl.SERVER_AUTH, "true");
transport = KerberosSaslHelper.getKerberosTransport(
- sessConfMap.get(HIVE_AUTH_PRINCIPAL), host, transport, saslProps);
+ sessConfMap.get(HIVE_AUTH_PRINCIPAL), host,
+ HiveAuthFactory.getSocketTransport(host, port, loginTimeout), saslProps);
} else {
String userName = sessConfMap.get(HIVE_AUTH_USER);
if ((userName == null) || userName.isEmpty()) {
@@ -207,12 +214,30 @@ public class HiveConnection implements j
if ((passwd == null) || passwd.isEmpty()) {
passwd = HIVE_ANONYMOUS_PASSWD;
}
+ String useSslStr = sessConfMap.get(HIVE_USE_SSL);
+ if ("true".equalsIgnoreCase(useSslStr)) {
+ String sslTrustStore = sessConfMap.get(HIVE_SSL_TRUST_STORE);
+ String sslTrustStorePassword = sessConfMap.get(HIVE_SSL_TRUST_STORE_PASSWORD);
+ if (sslTrustStore == null || sslTrustStore.isEmpty()) {
+ transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout);
+ } else {
+ transport = HiveAuthFactory.getSSLSocket(host, port, loginTimeout,
+ sslTrustStore, sslTrustStorePassword);
+ }
+ } else {
+ transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout);
+ }
transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
}
- } catch (SaslException e) {
- throw new SQLException("Could not create secure connection to "
- + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+ } else {
+ transport = HiveAuthFactory.getSocketTransport(host, port, loginTimeout);
}
+ } catch (SaslException e) {
+ throw new SQLException("Could not create secure connection to "
+ + jdbcURI + ": " + e.getMessage(), " 08S01", e);
+ } catch (TTransportException e) {
+ throw new SQLException("Could not create connection to "
+ + jdbcURI + ": " + e.getMessage(), " 08S01", e);
}
return transport;
}
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1539799&r1=1539798&r2=1539799&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Thu Nov 7 20:24:33 2013
@@ -277,6 +277,10 @@ public class HiveMetaStore extends Thrif
return threadLocalId.get();
}
+ public static void resetDefaultDBFlag() {
+ createDefaultDB = false;
+ }
+
public HMSHandler(String name) throws MetaException {
super(name);
hiveConf = new HiveConf(this.getClass());
@@ -4109,7 +4113,6 @@ public class HiveMetaStore extends Thrif
}
-
/**
* Discard a current delegation token.
*
Modified: hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java?rev=1539799&r1=1539798&r2=1539799&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java Thu Nov 7 20:24:33 2013
@@ -18,6 +18,12 @@
package org.apache.hive.service.auth;
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.text.MessageFormat;
+import java.util.HashMap;
+import java.util.Map;
import javax.security.auth.login.LoginException;
import javax.security.sasl.Sasl;
@@ -28,15 +34,15 @@ import org.apache.hadoop.hive.shims.Shim
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.thrift.TProcessorFactory;
+import org.apache.thrift.transport.TSSLTransportFactory;
+import org.apache.thrift.transport.TServerSocket;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.text.MessageFormat;
-import java.util.HashMap;
-import java.util.Map;
-
public class HiveAuthFactory {
private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class);
@@ -153,4 +159,44 @@ public class HiveAuthFactory {
}
}
+ public static TTransport getSocketTransport(String host, int port, int loginTimeout)
+ throws TTransportException {
+ return new TSocket(host, port, loginTimeout);
+ }
+
+ public static TTransport getSSLSocket(String host, int port, int loginTimeout)
+ throws TTransportException {
+ return TSSLTransportFactory.getClientSocket(host, port, loginTimeout);
+ }
+
+ public static TTransport getSSLSocket(String host, int port, int loginTimeout,
+ String trustStorePath, String trustStorePassWord) throws TTransportException {
+ TSSLTransportFactory.TSSLTransportParameters params =
+ new TSSLTransportFactory.TSSLTransportParameters();
+ params.setTrustStore(trustStorePath, trustStorePassWord);
+ params.requireClientAuth(true);
+ return TSSLTransportFactory.getClientSocket(host, port, loginTimeout, params);
+ }
+
+ public static TServerSocket getServerSocket(String hiveHost, int portNum)
+ throws TTransportException {
+ InetSocketAddress serverAddress = null;
+ if (hiveHost != null && !hiveHost.isEmpty()) {
+ serverAddress = new InetSocketAddress(hiveHost, portNum);
+ } else {
+ serverAddress = new InetSocketAddress(portNum);
+ }
+ return new TServerSocket(serverAddress );
+ }
+
+ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum,
+ String keyStorePath, String keyStorePassWord) throws TTransportException, UnknownHostException {
+ TSSLTransportFactory.TSSLTransportParameters params =
+ new TSSLTransportFactory.TSSLTransportParameters();
+ params.setKeyStore(keyStorePath, keyStorePassWord);
+
+ return TSSLTransportFactory.getServerSocket(portNum, 10000,
+ InetAddress.getByName(hiveHost), params);
+ }
+
}
Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java?rev=1539799&r1=1539798&r2=1539799&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java Thu Nov 7 20:24:33 2013
@@ -64,7 +64,19 @@ public class ThriftBinaryCLIService exte
minWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
maxWorkerThreads = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
- TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(new TServerSocket(serverAddress))
+ TServerSocket serverSocket = null;
+ if (!hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_USE_SSL)) {
+ serverSocket = HiveAuthFactory.getServerSocket(hiveHost, portNum);
+ } else {
+ String keyStorePath = hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH).trim();
+ if (keyStorePath.isEmpty()) {
+ throw new IllegalArgumentException(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PATH.varname +
+ " Not configured for SSL connection");
+ }
+ serverSocket = HiveAuthFactory.getServerSSLSocket(hiveHost, portNum,
+ keyStorePath, hiveConf.getVar(ConfVars.HIVE_SERVER2_SSL_KEYSTORE_PASSWORD));
+ }
+ TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket)
.processorFactory(processorFactory)
.transportFactory(transportFactory)
.protocolFactory(new TBinaryProtocol.Factory())
@@ -82,4 +94,4 @@ public class ThriftBinaryCLIService exte
}
}
-}
\ No newline at end of file
+}
Modified: hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1539799&r1=1539798&r2=1539799&view=diff
==============================================================================
--- hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Thu Nov 7 20:24:33 2013
@@ -611,6 +611,10 @@ public class Hadoop20Shims implements Ha
@Override
public UserGroupInformation createRemoteUser(String userName, List<String> groupNames) {
+ if (groupNames.isEmpty()) {
+ groupNames = new ArrayList<String>();
+ groupNames.add(userName);
+ }
return new UnixUserGroupInformation(userName, groupNames.toArray(new String[0]));
}
Modified: hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1539799&r1=1539798&r2=1539799&view=diff
==============================================================================
--- hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Thu Nov 7 20:24:33 2013
@@ -545,7 +545,6 @@ public abstract class HadoopShimsSecure
return tokenPath;
}
-
@Override
public UserGroupInformation createProxyUser(String userName) throws IOException {
return UserGroupInformation.createProxyUser(