Posted to commits@hive.apache.org by br...@apache.org on 2014/10/30 17:22:48 UTC
svn commit: r1635536 [5/28] - in /hive/branches/spark: ./ accumulo-handler/
accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/columns/
accumulo-handler/src/test/org/apache/hadoop/hive/accumulo/columns/
accumulo-handler/src/test/org/apache/hadoo...
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java Thu Oct 30 16:22:33 2014
@@ -19,8 +19,11 @@
// These tests depend heavily on timing, so they may occasionally fail.
package org.apache.hadoop.hive.hooks;
-import java.util.Properties;
+import java.io.Serializable;
+import java.lang.Override;
import java.sql.Statement;
+import java.util.List;
+import java.util.Properties;
import junit.framework.Assert;
@@ -28,9 +31,15 @@ import org.apache.hadoop.hive.conf.HiveC
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext.HookType;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hive.jdbc.HiveConnection;
import org.apache.hive.service.server.HiveServer2;
import org.junit.AfterClass;
+import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
@@ -44,10 +53,10 @@ public class TestHs2Hooks {
private static HiveServer2 hiveServer2;
public static class PostExecHook implements ExecuteWithHookContext {
- private static String userName;
- private static String ipAddress;
- private static String operation;
- private static Throwable error;
+ public static String userName;
+ public static String ipAddress;
+ public static String operation;
+ public static Throwable error;
public void run(HookContext hookContext) {
try {
@@ -64,10 +73,10 @@ public class TestHs2Hooks {
}
public static class PreExecHook implements ExecuteWithHookContext {
- private static String userName;
- private static String ipAddress;
- private static String operation;
- private static Throwable error;
+ public static String userName;
+ public static String ipAddress;
+ public static String operation;
+ public static Throwable error;
public void run(HookContext hookContext) {
try {
@@ -83,6 +92,41 @@ public class TestHs2Hooks {
}
}
+ public static class SemanticAnalysisHook implements HiveSemanticAnalyzerHook {
+ public static String userName;
+ public static String command;
+ public static String ipAddress;
+ public static Throwable preAnalyzeError;
+ public static Throwable postAnalyzeError;
+
+ @Override
+ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context,
+ ASTNode ast) throws SemanticException {
+ try {
+ userName = context.getUserName();
+ ipAddress = context.getIpAddress();
+ command = context.getCommand();
+ } catch (Throwable t) {
+ LOG.error("Error in semantic analysis hook preAnalyze: " + t, t);
+ preAnalyzeError = t;
+ }
+ return ast;
+ }
+
+ @Override
+ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+ List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ try {
+ userName = context.getUserName();
+ ipAddress = context.getIpAddress();
+ command = context.getCommand();
+ } catch (Throwable t) {
+ LOG.error("Error in semantic analysis hook postAnalyze: " + t, t);
+ postAnalyzeError = t;
+ }
+ }
+ }
+
/**
* @throws java.lang.Exception
*/
@@ -93,6 +137,8 @@ public class TestHs2Hooks {
PreExecHook.class.getName());
hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS,
PostExecHook.class.getName());
+ hiveConf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
+ SemanticAnalysisHook.class.getName());
hiveServer2 = new HiveServer2();
hiveServer2.init(hiveConf);
@@ -107,16 +153,32 @@ public class TestHs2Hooks {
}
}
+ @Before
+ public void setUpTest() throws Exception {
+ PreExecHook.userName = null;
+ PreExecHook.ipAddress = null;
+ PreExecHook.operation = null;
+ PreExecHook.error = null;
+ PostExecHook.userName = null;
+ PostExecHook.ipAddress = null;
+ PostExecHook.operation = null;
+ PostExecHook.error = null;
+ SemanticAnalysisHook.userName = null;
+ SemanticAnalysisHook.ipAddress = null;
+ SemanticAnalysisHook.command = null;
+ SemanticAnalysisHook.preAnalyzeError = null;
+ SemanticAnalysisHook.postAnalyzeError = null;
+ }
+
/**
- * Test get IpAddress and username from hook.
+ * Test that hook context properties are correctly set.
*/
@Test
- public void testIpUserName() throws Throwable {
+ public void testHookContexts() throws Throwable {
Properties connProp = new Properties();
connProp.setProperty("user", System.getProperty("user.name"));
connProp.setProperty("password", "");
HiveConnection connection = new HiveConnection("jdbc:hive2://localhost:10000/default", connProp);
-
Statement stmt = connection.createStatement();
stmt.executeQuery("show databases");
stmt.executeQuery("show tables");
@@ -142,6 +204,24 @@ public class TestHs2Hooks {
Assert.assertNotNull("operation is null", PreExecHook.operation);
Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
+
+ error = SemanticAnalysisHook.preAnalyzeError;
+ if (error != null) {
+ throw error;
+ }
+ error = SemanticAnalysisHook.postAnalyzeError;
+ if (error != null) {
+ throw error;
+ }
+
+ Assert.assertNotNull("semantic hook context ipAddress is null",
+ SemanticAnalysisHook.ipAddress);
+ Assert.assertNotNull("semantic hook context userName is null",
+ SemanticAnalysisHook.userName);
+ Assert.assertNotNull("semantic hook context command is null",
+ SemanticAnalysisHook.command);
+ Assert.assertTrue(SemanticAnalysisHook.ipAddress,
+ SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
+ Assert.assertEquals("show tables", SemanticAnalysisHook.command);
}
}
-
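For context on the wiring above: all three hook types are registered through HiveConf before the server starts, and HiveServer2 invokes them during compilation and execution. A minimal registration sketch, assuming an embedded HiveServer2 as in the test's @BeforeClass setup; the com.example class names are illustrative placeholders:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hive.service.server.HiveServer2;

    public class HookRegistrationSketch {
      public static void main(String[] args) throws Exception {
        HiveConf hiveConf = new HiveConf();
        // Each var takes a comma-separated list of hook class names.
        hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "com.example.MyPreHook");
        hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, "com.example.MyPostHook");
        hiveConf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK, "com.example.MySemanticHook");
        HiveServer2 hiveServer2 = new HiveServer2();
        hiveServer2.init(hiveConf);
        hiveServer2.start();
      }
    }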
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestMetaStoreEventListener.java Thu Oct 30 16:22:33 2014
@@ -29,26 +29,33 @@ import junit.framework.TestCase;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PartitionEventType;
import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.AddIndexEvent;
import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.AlterIndexEvent;
import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.DropIndexEvent;
import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
import org.apache.hadoop.hive.metastore.events.DropTableEvent;
import org.apache.hadoop.hive.metastore.events.ListenerEvent;
import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent;
+import org.apache.hadoop.hive.metastore.events.PreAddIndexEvent;
import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterIndexEvent;
import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent;
import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropIndexEvent;
import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
import org.apache.hadoop.hive.metastore.events.PreEventContext;
@@ -183,12 +190,35 @@ public class TestMetaStoreEventListener
assertEquals(expectedDb, actualDb);
}
+ private void validateIndex(Index expectedIndex, Index actualIndex) {
+ assertEquals(expectedIndex.getDbName(), actualIndex.getDbName());
+ assertEquals(expectedIndex.getIndexName(), actualIndex.getIndexName());
+ assertEquals(expectedIndex.getIndexHandlerClass(), actualIndex.getIndexHandlerClass());
+ assertEquals(expectedIndex.getOrigTableName(), actualIndex.getOrigTableName());
+ assertEquals(expectedIndex.getIndexTableName(), actualIndex.getIndexTableName());
+ assertEquals(expectedIndex.getSd().getLocation(), actualIndex.getSd().getLocation());
+ }
+
+ private void validateAddIndex(Index expectedIndex, Index actualIndex) {
+ validateIndex(expectedIndex, actualIndex);
+ }
+
+ private void validateAlterIndex(Index expectedOldIndex, Index actualOldIndex,
+ Index expectedNewIndex, Index actualNewIndex) {
+ validateIndex(expectedOldIndex, actualOldIndex);
+ validateIndex(expectedNewIndex, actualNewIndex);
+ }
+
+ private void validateDropIndex(Index expectedIndex, Index actualIndex) {
+ validateIndex(expectedIndex, actualIndex);
+ }
+
public void testListener() throws Exception {
int listSize = 0;
List<ListenerEvent> notifyList = DummyListener.notifyList;
- assertEquals(notifyList.size(), listSize);
List<PreEventContext> preNotifyList = DummyPreListener.notifyList;
+ assertEquals(notifyList.size(), listSize);
assertEquals(preNotifyList.size(), listSize);
driver.run("create database " + dbName);
@@ -216,6 +246,48 @@ public class TestMetaStoreEventListener
assert tblEvent.getStatus();
validateCreateTable(tbl, tblEvent.getTable());
+ driver.run("create index tmptbl_i on table tmptbl(a) as 'compact' " +
+ "WITH DEFERRED REBUILD IDXPROPERTIES ('prop1'='val1', 'prop2'='val2')");
+ listSize += 2; // creates index table internally
+ assertEquals(notifyList.size(), listSize);
+
+ AddIndexEvent addIndexEvent = (AddIndexEvent)notifyList.get(listSize - 1);
+ assert addIndexEvent.getStatus();
+ PreAddIndexEvent preAddIndexEvent = (PreAddIndexEvent)(preNotifyList.get(preNotifyList.size() - 3));
+
+ Index oldIndex = msc.getIndex(dbName, "tmptbl", "tmptbl_i");
+
+ validateAddIndex(oldIndex, addIndexEvent.getIndex());
+
+ validateAddIndex(oldIndex, preAddIndexEvent.getIndex());
+
+ driver.run("alter index tmptbl_i on tmptbl set IDXPROPERTIES " +
+ "('prop1'='val1_new', 'prop3'='val3')");
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+
+ Index newIndex = msc.getIndex(dbName, "tmptbl", "tmptbl_i");
+
+ AlterIndexEvent alterIndexEvent = (AlterIndexEvent) notifyList.get(listSize - 1);
+ assert alterIndexEvent.getStatus();
+ validateAlterIndex(oldIndex, alterIndexEvent.getOldIndex(),
+ newIndex, alterIndexEvent.getNewIndex());
+
+ PreAlterIndexEvent preAlterIndexEvent = (PreAlterIndexEvent) (preNotifyList.get(preNotifyList.size() - 1));
+ validateAlterIndex(oldIndex, preAlterIndexEvent.getOldIndex(),
+ newIndex, preAlterIndexEvent.getNewIndex());
+
+ driver.run("drop index tmptbl_i on tmptbl");
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+
+ DropIndexEvent dropIndexEvent = (DropIndexEvent) notifyList.get(listSize - 1);
+ assert dropIndexEvent.getStatus();
+ validateDropIndex(newIndex, dropIndexEvent.getIndex());
+
+ PreDropIndexEvent preDropIndexEvent = (PreDropIndexEvent) (preNotifyList.get(preNotifyList.size() - 1));
+ validateDropIndex(newIndex, preDropIndexEvent.getIndex());
+
driver.run("alter table tmptbl add partition (b='2011')");
listSize++;
assertEquals(notifyList.size(), listSize);
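The index events asserted above are delivered to any configured MetaStoreEventListener. A minimal sketch of a recording listener in the spirit of the test's DummyListener, assuming the onAddIndex callback that pairs with AddIndexEvent; the class name and storage are illustrative:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.events.AddIndexEvent;
    import org.apache.hadoop.hive.metastore.events.ListenerEvent;

    public class RecordingListener extends MetaStoreEventListener {
      // The test polls a static list like this one (DummyListener.notifyList).
      public static final List<ListenerEvent> notifyList = new ArrayList<ListenerEvent>();

      public RecordingListener(Configuration config) {
        super(config);
      }

      @Override
      public void onAddIndex(AddIndexEvent indexEvent) throws MetaException {
        notifyList.add(indexEvent);
      }
    }

Note the "listSize += 2" in the test: per its comment, create-index also creates the index table internally, so the DDL fires two listener events rather than one.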
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java Thu Oct 30 16:22:33 2014
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.ql.process
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim;
import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.Before;
import org.junit.Test;
/**
@@ -83,10 +84,15 @@ public abstract class FolderPermissionBa
fs.mkdirs(warehouseDir);
conf.setVar(ConfVars.METASTOREWAREHOUSE, warehouseDir.toString());
+ // Assume that on Windows the tests run from either the C: or D: drive.
dataFileDir = conf.get("test.data.files").replace('\\', '/')
- .replace("c:", "");
+ .replace("c:", "").replace("C:", "").replace("D:", "").replace("d:", "");
dataFilePath = new Path(dataFileDir, "kv1.txt");
+ // Set up scratch directory
+ Path scratchDir = new Path(baseDfsDir, "scratchdir");
+ conf.setVar(HiveConf.ConfVars.SCRATCHDIR, scratchDir.toString());
+
//set hive conf vars
conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
conf.setBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);
@@ -114,6 +120,11 @@ public abstract class FolderPermissionBa
Assert.assertEquals(0,ret.getResponseCode());
}
+ @Before
+ public void setupBeforeTest() throws Exception {
+ driver.run("USE default");
+ }
+
@Test
public void testCreateDb() throws Exception {
//see if db inherits permission from warehouse directory.
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java Thu Oct 30 16:22:33 2014
@@ -230,8 +230,9 @@ public class TestCompactor {
t.setThreadId((int) t.getId());
t.setHiveConf(conf);
MetaStoreThread.BooleanPointer stop = new MetaStoreThread.BooleanPointer();
+ MetaStoreThread.BooleanPointer looped = new MetaStoreThread.BooleanPointer();
stop.boolVal = true;
- t.init(stop);
+ t.init(stop, looped);
t.run();
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts = rsp.getCompacts();
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestDBTokenStore.java Thu Oct 30 16:22:33 2014
@@ -37,7 +37,7 @@ public class TestDBTokenStore extends Te
public void testDBTokenStore() throws TokenStoreException, MetaException, IOException {
DelegationTokenStore ts = new DBTokenStore();
- ts.setStore(new HMSHandler("Test handler"));
+ ts.init(new HMSHandler("Test handler").getMS(), null);
assertEquals(0, ts.getMasterKeys().length);
assertEquals(false,ts.removeMasterKey(-1));
try{
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestZooKeeperTokenStore.java Thu Oct 30 16:22:33 2014
@@ -24,25 +24,28 @@ import java.util.List;
import junit.framework.TestCase;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
+import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge.Server.ServerMode;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
import org.apache.hadoop.security.token.delegation.HiveDelegationTokenSupport;
import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.data.ACL;
-import org.apache.zookeeper.data.Stat;
import org.junit.Assert;
public class TestZooKeeperTokenStore extends TestCase {
private MiniZooKeeperCluster zkCluster = null;
- private ZooKeeper zkClient = null;
+ private CuratorFramework zkClient = null;
private int zkPort = -1;
private ZooKeeperTokenStore ts;
// connect timeout large enough for slower test environments
private final int connectTimeoutMillis = 30000;
+ private final int sessionTimeoutMillis = 3000;
@Override
protected void setUp() throws Exception {
@@ -53,8 +56,10 @@ public class TestZooKeeperTokenStore ext
this.zkCluster = new MiniZooKeeperCluster();
this.zkPort = this.zkCluster.startup(zkDataDir);
- this.zkClient = ZooKeeperTokenStore.createConnectedClient("localhost:" + zkPort, 3000,
- connectTimeoutMillis);
+ this.zkClient = CuratorFrameworkFactory.builder().connectString("localhost:" + zkPort)
+ .sessionTimeoutMs(sessionTimeoutMillis).connectionTimeoutMs(connectTimeoutMillis)
+ .retryPolicy(new ExponentialBackoffRetry(1000, 3)).build();
+ this.zkClient.start();
}
@Override
@@ -84,14 +89,16 @@ public class TestZooKeeperTokenStore ext
public void testTokenStorage() throws Exception {
String ZK_PATH = "/zktokenstore-testTokenStorage";
ts = new ZooKeeperTokenStore();
- ts.setConf(createConf(ZK_PATH));
+ Configuration conf = createConf(ZK_PATH);
+ conf.set(HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL, "world:anyone:cdrwa");
+ ts.setConf(conf);
+ ts.init(null, ServerMode.METASTORE);
+
+ String metastore_zk_path = ZK_PATH + ServerMode.METASTORE;
int keySeq = ts.addMasterKey("key1Data");
- byte[] keyBytes = zkClient.getData(
- ZK_PATH
- + "/keys/"
- + String.format(ZooKeeperTokenStore.ZK_SEQ_FORMAT,
- keySeq), false, null);
+ byte[] keyBytes = zkClient.getData().forPath(
+ metastore_zk_path + "/keys/" + String.format(ZooKeeperTokenStore.ZK_SEQ_FORMAT, keySeq));
assertNotNull(keyBytes);
assertEquals(new String(keyBytes), "key1Data");
@@ -116,8 +123,7 @@ public class TestZooKeeperTokenStore ext
HiveDelegationTokenSupport
.encodeDelegationTokenInformation(tokenInfoRead));
- List<DelegationTokenIdentifier> allIds = ts
- .getAllDelegationTokenIdentifiers();
+ List<DelegationTokenIdentifier> allIds = ts.getAllDelegationTokenIdentifiers();
assertEquals(1, allIds.size());
Assert.assertEquals(TokenStoreDelegationTokenSecretManager
.encodeWritable(tokenId),
@@ -138,10 +144,10 @@ public class TestZooKeeperTokenStore ext
ts = new ZooKeeperTokenStore();
try {
ts.setConf(conf);
+ ts.init(null, ServerMode.METASTORE);
fail("expected ACL exception");
} catch (DelegationTokenStore.TokenStoreException e) {
- assertEquals(e.getCause().getClass(),
- KeeperException.NoAuthException.class);
+ assertEquals(KeeperException.NoAuthException.class, e.getCause().getClass());
}
}
@@ -159,10 +165,10 @@ public class TestZooKeeperTokenStore ext
ts = new ZooKeeperTokenStore();
try {
ts.setConf(conf);
+ ts.init(null, ServerMode.METASTORE);
fail("expected ACL exception");
} catch (DelegationTokenStore.TokenStoreException e) {
- assertEquals(e.getCause().getClass(),
- KeeperException.InvalidACLException.class);
+ assertEquals(KeeperException.InvalidACLException.class, e.getCause().getClass());
}
}
@@ -171,10 +177,11 @@ public class TestZooKeeperTokenStore ext
Configuration conf = createConf(ZK_PATH);
conf.set(
HadoopThriftAuthBridge20S.Server.DELEGATION_TOKEN_STORE_ZK_ACL,
- "world:anyone:cdrwa,ip:127.0.0.1:cdrwa");
+ "ip:127.0.0.1:cdrwa,world:anyone:cdrwa");
ts = new ZooKeeperTokenStore();
ts.setConf(conf);
- List<ACL> acl = zkClient.getACL(ZK_PATH, new Stat());
+ ts.init(null, ServerMode.METASTORE);
+ List<ACL> acl = zkClient.getACL().forPath(ZK_PATH + ServerMode.METASTORE);
assertEquals(2, acl.size());
}
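The ZooKeeper-to-Curator migration above reduces to a bootstrap-and-read pattern; here it is as a standalone sketch, with placeholder connect string, timeouts, and znode path:

    import org.apache.curator.framework.CuratorFramework;
    import org.apache.curator.framework.CuratorFrameworkFactory;
    import org.apache.curator.retry.ExponentialBackoffRetry;

    public class CuratorReadSketch {
      public static void main(String[] args) throws Exception {
        CuratorFramework zkClient = CuratorFrameworkFactory.builder()
            .connectString("localhost:2181")
            .sessionTimeoutMs(3000)
            .connectionTimeoutMs(30000)
            .retryPolicy(new ExponentialBackoffRetry(1000, 3))
            .build();
        zkClient.start(); // unlike the raw ZooKeeper constructor, the client must be started
        // Fluent replacement for the old zkClient.getData(path, false, null):
        byte[] data = zkClient.getData().forPath("/zktokenstore-testTokenStorage/keys/0000000001");
        System.out.println(new String(data));
        zkClient.close();
      }
    }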
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java Thu Oct 30 16:22:33 2014
@@ -34,11 +34,12 @@ import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.service.server.HiveServer2;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
@@ -48,15 +49,12 @@ import org.junit.Test;
* TestBeeLineWithArgs - executes tests of the command-line arguments to BeeLine
*
*/
-//public class TestBeeLineWithArgs extends TestCase {
public class TestBeeLineWithArgs {
// Default location of HiveServer2
- final private static String JDBC_URL = BeeLine.BEELINE_DEFAULT_JDBC_URL + "localhost:10000";
private static final String tableName = "TestBeelineTable1";
private static final String tableComment = "Test table comment";
-
- private static HiveServer2 hiveServer2;
+ private static MiniHS2 miniHS2;
private List<String> getBaseArgs(String jdbcUrl) {
List<String> argList = new ArrayList<String>(8);
@@ -74,17 +72,9 @@ public class TestBeeLineWithArgs {
HiveConf hiveConf = new HiveConf();
// Set to non-zk lock manager to prevent HS2 from trying to connect
hiveConf.setVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER, "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
-
- // hiveConf.logVars(System.err);
- // System.err.flush();
-
- hiveServer2 = new HiveServer2();
- hiveServer2.init(hiveConf);
- System.err.println("Starting HiveServer2...");
- hiveServer2.start();
- Thread.sleep(1000);
+ miniHS2 = new MiniHS2(hiveConf);
+ miniHS2.start(new HashMap<String, String>());
createTable();
-
}
/**
@@ -94,7 +84,7 @@ public class TestBeeLineWithArgs {
*/
private static void createTable() throws ClassNotFoundException, SQLException {
Class.forName(BeeLine.BEELINE_DEFAULT_JDBC_DRIVER);
- Connection con = DriverManager.getConnection(JDBC_URL,"", "");
+ Connection con = DriverManager.getConnection(miniHS2.getBaseJdbcURL(),"", "");
assertNotNull("Connection is null", con);
assertFalse("Connection should not be closed", con.isClosed());
@@ -129,13 +119,8 @@ public class TestBeeLineWithArgs {
*/
@AfterClass
public static void postTests() {
- try {
- if (hiveServer2 != null) {
- System.err.println("Stopping HiveServer2...");
- hiveServer2.stop();
- }
- } catch (Throwable t) {
- t.printStackTrace();
+ if (miniHS2.isStarted()) {
+ miniHS2.stop();
}
}
@@ -164,22 +149,20 @@ public class TestBeeLineWithArgs {
* Test for presence of an expected pattern
* in the output (stdout or stderr), fail if not found
* Print PASSED or FAILED
- * @paramm testName Name of test to print
* @param expectedPattern Text to look for in command output/error
* @param shouldMatch true if the pattern should be found, false if it should not
* @throws Exception on command execution error
*/
- private void testScriptFile(String testName, String scriptText, String expectedPattern,
+ private void testScriptFile(String scriptText, String expectedPattern,
boolean shouldMatch, List<String> argList) throws Throwable {
// Put the script content in a temp file
- File scriptFile = File.createTempFile(testName, "temp");
+ File scriptFile = File.createTempFile(this.getClass().getSimpleName(), "temp");
scriptFile.deleteOnExit();
PrintStream os = new PrintStream(new FileOutputStream(scriptFile));
os.print(scriptText);
os.close();
- System.out.println(">>> STARTED -f " + testName);
{
List<String> copy = new ArrayList<String>(argList);
copy.add("-f");
@@ -189,12 +172,11 @@ public class TestBeeLineWithArgs {
boolean matches = output.contains(expectedPattern);
if (shouldMatch != matches) {
//failed
- fail(testName + ": Output" + output + " should" + (shouldMatch ? "" : " not") +
+ fail("Output" + output + " should" + (shouldMatch ? "" : " not") +
" contain " + expectedPattern);
}
}
- System.out.println(">>> STARTED -i " + testName);
{
List<String> copy = new ArrayList<String>(argList);
copy.add("-i");
@@ -204,7 +186,7 @@ public class TestBeeLineWithArgs {
boolean matches = output.contains(expectedPattern);
if (shouldMatch != matches) {
//failed
- fail(testName + ": Output" + output + " should" + (shouldMatch ? "" : " not") +
+ fail("Output" + output + " should" + (shouldMatch ? "" : " not") +
" contain " + expectedPattern);
}
}
@@ -217,11 +199,10 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testWhitespaceBeforeCommentScriptFile() throws Throwable {
- final String TEST_NAME = "testWhitespaceBeforeCommentScriptFile";
- final String SCRIPT_TEXT = " -- comment has spaces and tabs before it\n # comment has spaces and tabs before it\n";
- final String EXPECTED_PATTERN = "cannot recognize input near '<EOF>'";
- List<String> argList = getBaseArgs(JDBC_URL);
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, false, argList);
+ final String SCRIPT_TEXT = " -- comment has spaces and tabs before it\n # comment has spaces and tabs before it\n";
+ final String EXPECTED_PATTERN = "cannot recognize input near '<EOF>'";
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, false, argList);
}
/**
@@ -232,11 +213,10 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testPositiveScriptFile() throws Throwable {
- final String TEST_NAME = "testPositiveScriptFile";
final String SCRIPT_TEXT = "show databases;\n";
final String EXPECTED_PATTERN = " default ";
- List<String> argList = getBaseArgs(JDBC_URL);
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
/**
@@ -247,24 +227,22 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testBeelineHiveVariable() throws Throwable {
- List<String> argList = getBaseArgs(JDBC_URL);
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
argList.add("--hivevar");
argList.add("DUMMY_TBL=dummy");
- final String TEST_NAME = "testHiveCommandLineHiveVariable";
final String SCRIPT_TEXT = "create table ${DUMMY_TBL} (d int);\nshow tables;\n";
final String EXPECTED_PATTERN = "dummy";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
@Test
public void testBeelineHiveConfVariable() throws Throwable {
- List<String> argList = getBaseArgs(JDBC_URL);
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
argList.add("--hiveconf");
- argList.add("hive.table.name=dummy");
- final String TEST_NAME = "testBeelineHiveConfVariable";
- final String SCRIPT_TEXT = "create table ${hiveconf:hive.table.name} (d int);\nshow tables;\n";
+ argList.add("test.hive.table.name=dummy");
+ final String SCRIPT_TEXT = "create table ${hiveconf:test.hive.table.name} (d int);\nshow tables;\n";
final String EXPECTED_PATTERN = "dummy";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
/**
@@ -274,7 +252,7 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testBeelineMultiHiveVariable() throws Throwable {
- List<String> argList = getBaseArgs(JDBC_URL);
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
argList.add("--hivevar");
argList.add("TABLE_NAME=dummy2");
@@ -289,10 +267,9 @@ public class TestBeeLineWithArgs {
argList.add("--hiveconf");
argList.add("COLUMN_TYPE=int");
- final String TEST_NAME = "testHiveCommandLineHiveVariable";
final String SCRIPT_TEXT = "${COMMAND} ${OBJECT} ${TABLE_NAME} (${hiveconf:COLUMN_NAME} ${hiveconf:COLUMN_TYPE});\nshow tables;\n";
final String EXPECTED_PATTERN = "dummy2";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
/**
@@ -302,20 +279,18 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testBreakOnErrorScriptFile() throws Throwable {
- List<String> argList = getBaseArgs(JDBC_URL);
- final String TEST_NAME = "testBreakOnErrorScriptFile";
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
final String SCRIPT_TEXT = "select * from abcdefg01;\nshow databases;\n";
final String EXPECTED_PATTERN = " default ";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, false, argList);
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, false, argList);
}
@Test
public void testBeelineShellCommand() throws Throwable {
- List<String> argList = getBaseArgs(JDBC_URL);
- final String TEST_NAME = "testBeelineShellCommand";
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
final String SCRIPT_TEXT = "!sh echo \"hello world.\" > hw.txt\n!sh cat hw.txt\n!rm hw.txt";
final String EXPECTED_PATTERN = "hello world";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
/**
@@ -324,11 +299,10 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testNullDefault() throws Throwable {
- final String TEST_NAME = "testNullDefault";
final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
"select null from " + tableName + " limit 1 ;\n";
final String EXPECTED_PATTERN = "NULL";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(JDBC_URL));
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(miniHS2.getBaseJdbcURL()));
}
/**
@@ -337,39 +311,123 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testNullNonEmpty() throws Throwable {
- final String TEST_NAME = "testNullNonDefault";
final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
"!set nullemptystring false\n select null from " + tableName + " limit 1 ;\n";
final String EXPECTED_PATTERN = "NULL";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(JDBC_URL));
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(miniHS2.getBaseJdbcURL()));
}
@Test
public void testGetVariableValue() throws Throwable {
- final String TEST_NAME = "testGetVariableValue";
final String SCRIPT_TEXT = "set env:TERM;";
final String EXPECTED_PATTERN = "env:TERM";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(JDBC_URL));
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(miniHS2.getBaseJdbcURL()));
}
/**
* Select null from table , check if setting null to empty string works.
- * Original beeline/sqlline used to print nulls as empty strings
+ * Original beeline/sqlline used to print nulls as empty strings.
+ * Also test csv2 output format
* Print PASSED or FAILED
*/
@Test
public void testNullEmpty() throws Throwable {
- final String TEST_NAME = "testNullNonDefault";
final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
"!set nullemptystring true\n select 'abc',null,'def' from " + tableName + " limit 1 ;\n";
final String EXPECTED_PATTERN = "abc,,def";
- List<String> argList = getBaseArgs(JDBC_URL);
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+ argList.add("--outputformat=csv2");
+
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ }
+
+ /**
+ * Test writing output using DSV format, with custom delimiter ";"
+ */
+ @Test
+ public void testDSVOutput() throws Throwable {
+ String SCRIPT_TEXT = getFormatTestQuery();
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+ argList.add("--outputformat=dsv");
+ argList.add("--delimiterForDSV=;");
+
+ final String EXPECTED_PATTERN = "1;NULL;defg;\"ab\"\"c\";1.0";
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ }
+
+ /**
+ * Test writing output using TSV (new) format
+ */
+ @Test
+ public void testTSV2Output() throws Throwable {
+ String SCRIPT_TEXT = getFormatTestQuery();
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+ argList.add("--outputformat=tsv2");
+
+ final String EXPECTED_PATTERN = "1\tNULL\tdefg\t\"ab\"\"c\"\t1.0";
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ }
+
+ /**
+ * Test writing output using TSV deprecated format
+ */
+ @Test
+ public void testTSVOutput() throws Throwable {
+ String SCRIPT_TEXT = getFormatTestQuery();
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+ argList.add("--outputformat=tsv");
+
+ final String EXPECTED_PATTERN = "'1'\t'NULL'\t'defg'\t'ab\"c\'\t'1.0'";
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ }
+
+
+ /**
+ * Test writing output using TSV deprecated format
+ * Check for deprecation message
+ */
+ @Test
+ public void testTSVOutputDeprecation() throws Throwable {
+ String SCRIPT_TEXT = getFormatTestQuery();
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+ argList.add("--outputformat=tsv");
+
+ final String EXPECTED_PATTERN = "Format tsv is deprecated, please use tsv2";
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ }
+
+ /**
+ * Test writing output using CSV deprecated format
+ * Check for deprecation message
+ */
+ @Test
+ public void testCSVOutputDeprecation() throws Throwable {
+ String SCRIPT_TEXT = getFormatTestQuery();
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
argList.add("--outputformat=csv");
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ final String EXPECTED_PATTERN = "Format csv is deprecated, please use csv2";
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ }
+
+ /**
+ * Test writing output using CSV deprecated format
+ */
+ @Test
+ public void testCSVOutput() throws Throwable {
+ String SCRIPT_TEXT = getFormatTestQuery();
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
+ argList.add("--outputformat=csv");
+ final String EXPECTED_PATTERN = "'1','NULL','defg','ab\"c\','1.0'";
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
+
+ private String getFormatTestQuery() {
+ return "set hive.support.concurrency = false;\n" +
+ "select 1, null, 'defg', 'ab\"c', 1.0D from " + tableName + " limit 1 ;\n";
+ }
/**
* Select null from table, check if setting null to empty string works - using the beeline cmd line
* argument.
@@ -378,49 +436,36 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testNullEmptyCmdArg() throws Throwable {
- final String TEST_NAME = "testNullNonDefault";
final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
"select 'abc',null,'def' from " + tableName + " limit 1 ;\n";
- //final String EXPECTED_PATTERN = "| abc | | def |";
- final String EXPECTED_PATTERN = "abc,,def";
+ final String EXPECTED_PATTERN = "'abc','','def'";
- List<String> argList = getBaseArgs(JDBC_URL);
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
argList.add("--nullemptystring=true");
argList.add("--outputformat=csv");
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
/**
* Attempt to execute a missing script file with the -f option to BeeLine
- * Print PASSED or FAILED
*/
@Test
public void testNegativeScriptFile() throws Throwable {
- final String TEST_NAME = "testNegativeScriptFile";
final String EXPECTED_PATTERN = " default ";
- long startTime = System.currentTimeMillis();
- System.out.println(">>> STARTED " + TEST_NAME);
-
// Create and delete a temp file
File scriptFile = File.createTempFile("beelinenegative", "temp");
scriptFile.delete();
- List<String> argList = getBaseArgs(JDBC_URL);
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL());
argList.add("-f");
argList.add(scriptFile.getAbsolutePath());
try {
- String output = testCommandLineScript(argList, null);
- long elapsedTime = (System.currentTimeMillis() - startTime)/1000;
- String time = "(" + elapsedTime + "s)";
+ String output = testCommandLineScript(argList, null);
if (output.contains(EXPECTED_PATTERN)) {
- System.err.println("Output: " + output);
- System.err.println(">>> FAILED " + TEST_NAME + " (ERROR) " + time);
- fail(TEST_NAME);
- } else {
- System.out.println(">>> PASSED " + TEST_NAME + " " + time);
+ fail("Output: " + output + " Negative pattern: " + EXPECTED_PATTERN);
}
} catch (Throwable e) {
e.printStackTrace();
@@ -456,11 +501,10 @@ public class TestBeeLineWithArgs {
@Test
public void testHiveVarSubstitution() throws Throwable {
- List<String> argList = getBaseArgs(JDBC_URL + "#D_TBL=dummy_t");
- final String TEST_NAME = "testHiveVarSubstitution";
+ List<String> argList = getBaseArgs(miniHS2.getBaseJdbcURL() + "#D_TBL=dummy_t");
final String SCRIPT_TEXT = "create table ${D_TBL} (d int);\nshow tables;\n";
final String EXPECTED_PATTERN = "dummy_t";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
@Test
@@ -469,13 +513,12 @@ public class TestBeeLineWithArgs {
List<String> argList = getBaseArgs(embeddedJdbcURL);
argList.add("--hivevar");
argList.add("DUMMY_TBL=embedded_table");
- final String TEST_NAME = "testEmbeddedBeelineConnection";
// Set to non-zk lock manager to avoid trying to connect to zookeeper
final String SCRIPT_TEXT =
"set hive.lock.manager=org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager;\n" +
"create table ${DUMMY_TBL} (d int);\nshow tables;\n";
final String EXPECTED_PATTERN = "embedded_table";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
/**
@@ -484,11 +527,10 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testQueryProgress() throws Throwable {
- final String TEST_NAME = "testQueryProgress";
final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
"select count(*) from " + tableName + ";\n";
final String EXPECTED_PATTERN = "Parsing command";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(JDBC_URL));
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(miniHS2.getBaseJdbcURL()));
}
/**
@@ -497,11 +539,10 @@ public class TestBeeLineWithArgs {
*/
@Test
public void testQueryProgressHidden() throws Throwable {
- final String TEST_NAME = "testQueryProgress";
final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
"!set silent true\n" +
"select count(*) from " + tableName + ";\n";
final String EXPECTED_PATTERN = "Parsing command";
- testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, false, getBaseArgs(JDBC_URL));
+ testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, false, getBaseArgs(miniHS2.getBaseJdbcURL()));
}
}
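All of the new output-format tests funnel through testScriptFile(), which writes the script to a temp file and runs BeeLine over it twice (once via -f, once via -i). A hedged sketch of the argument list such a run assembles; the driver class, URL, and script path are placeholders standing in for what getBaseArgs() supplies:

    import java.util.ArrayList;
    import java.util.List;

    public class BeeLineArgsSketch {
      public static void main(String[] args) {
        List<String> argList = new ArrayList<String>();
        argList.add("-d"); argList.add("org.apache.hive.jdbc.HiveDriver");
        argList.add("-u"); argList.add("jdbc:hive2://localhost:10000/default");
        argList.add("--outputformat=dsv");   // csv2, tsv2, dsv, csv, and tsv are exercised above
        argList.add("--delimiterForDSV=;");  // only meaningful with the dsv format
        argList.add("-f"); argList.add("/tmp/format_test.sql");
        System.out.println(argList);         // handed to BeeLine, which executes the script
      }
    }

With the DSV delimiter set to ';', the query from getFormatTestQuery() prints 1;NULL;defg;"ab""c";1.0, which is exactly what testDSVOutput() asserts.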
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestSchemaTool.java Thu Oct 30 16:22:33 2014
@@ -19,9 +19,12 @@
package org.apache.hive.beeline;
import java.io.BufferedWriter;
+import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
import java.util.Random;
import junit.framework.TestCase;
@@ -31,14 +34,14 @@ import org.apache.commons.lang.StringUti
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaException;
import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
-import org.apache.hive.beeline.HiveSchemaHelper;
import org.apache.hive.beeline.HiveSchemaHelper.NestedScriptParser;
-import org.apache.hive.beeline.HiveSchemaTool;
public class TestSchemaTool extends TestCase {
private HiveSchemaTool schemaTool;
private HiveConf hiveConf;
private String testMetastoreDB;
+ private PrintStream errStream;
+ private PrintStream outStream;
@Override
protected void setUp() throws Exception {
@@ -48,8 +51,11 @@ public class TestSchemaTool extends Test
System.setProperty(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
"jdbc:derby:" + testMetastoreDB + ";create=true");
hiveConf = new HiveConf(this.getClass());
- schemaTool = new HiveSchemaTool(System.getProperty("test.tmp.dir"), hiveConf, "derby");
+ schemaTool = new HiveSchemaTool(
+ System.getProperty("test.tmp.dir", "target/tmp"), hiveConf, "derby");
System.setProperty("beeLine.system.exit", "true");
+ errStream = System.err;
+ outStream = System.out;
}
@Override
@@ -58,6 +64,8 @@ public class TestSchemaTool extends Test
if (metaStoreDir.exists()) {
FileUtils.forceDeleteOnExit(metaStoreDir);
}
+ System.setOut(outStream);
+ System.setErr(errStream);
}
/**
@@ -121,12 +129,42 @@ public class TestSchemaTool extends Test
foundException = true;
}
if (!foundException) {
- throw new Exception("Hive operations shouldn't pass with older version schema");
+ throw new Exception(
+ "Hive operations shouldn't pass with older version schema");
}
- // upgrade schema from 0.7.0 to latest
+ // Generate dummy pre-upgrade script with errors
+ String invalidPreUpgradeScript = writeDummyPreUpgradeScript(
+ 0, "upgrade-0.11.0-to-0.12.0.derby.sql", "foo bar;");
+ // Generate dummy pre-upgrade scripts with valid SQL
+ String validPreUpgradeScript0 = writeDummyPreUpgradeScript(
+ 0, "upgrade-0.12.0-to-0.13.0.derby.sql",
+ "CREATE TABLE schema_test0 (id integer);");
+ String validPreUpgradeScript1 = writeDummyPreUpgradeScript(
+ 1, "upgrade-0.12.0-to-0.13.0.derby.sql",
+ "CREATE TABLE schema_test1 (id integer);");
+
+ // Capture system out and err
+ schemaTool.setVerbose(true);
+ OutputStream stderr = new ByteArrayOutputStream();
+ PrintStream errPrintStream = new PrintStream(stderr);
+ System.setErr(errPrintStream);
+ OutputStream stdout = new ByteArrayOutputStream();
+ PrintStream outPrintStream = new PrintStream(stdout);
+ System.setOut(outPrintStream);
+
+ // Upgrade schema from 0.7.0 to latest
schemaTool.doUpgrade("0.7.0");
- // verify that driver works fine with latest schema
+
+ // Verify that the schemaTool ran pre-upgrade scripts and ignored errors
+ assertTrue(stderr.toString().contains(invalidPreUpgradeScript));
+ assertTrue(stderr.toString().contains("foo"));
+ assertFalse(stderr.toString().contains(validPreUpgradeScript0));
+ assertFalse(stderr.toString().contains(validPreUpgradeScript1));
+ assertTrue(stdout.toString().contains(validPreUpgradeScript0));
+ assertTrue(stdout.toString().contains(validPreUpgradeScript1));
+
+ // Verify that driver works fine with latest schema
schemaTool.verifySchemaVersion();
}
@@ -152,9 +190,9 @@ public class TestSchemaTool extends Test
String expectedSQL = StringUtils.join(resultScript, System.getProperty("line.separator")) +
System.getProperty("line.separator");
File testScriptFile = generateTestScript(testScript);
- String flattenedSql = HiveSchemaTool.buildCommand(
- HiveSchemaHelper.getDbCommandParser("derby"),
- testScriptFile.getParentFile().getPath(), testScriptFile.getName());
+ String flattenedSql = HiveSchemaHelper.getDbCommandParser("derby")
+ .buildCommand(testScriptFile.getParentFile().getPath(),
+ testScriptFile.getName());
assertEquals(expectedSQL, flattenedSql);
}
@@ -194,9 +232,9 @@ public class TestSchemaTool extends Test
};
File testScriptFile = generateTestScript(parentTestScript);
- String flattenedSql = HiveSchemaTool.buildCommand(
- HiveSchemaHelper.getDbCommandParser("derby"),
- testScriptFile.getParentFile().getPath(), testScriptFile.getName());
+ String flattenedSql = HiveSchemaHelper.getDbCommandParser("derby")
+ .buildCommand(testScriptFile.getParentFile().getPath(),
+ testScriptFile.getName());
assertFalse(flattenedSql.contains("RUN"));
assertFalse(flattenedSql.contains("comment"));
assertTrue(flattenedSql.contains(childTab1));
@@ -239,9 +277,9 @@ public class TestSchemaTool extends Test
};
File testScriptFile = generateTestScript(parentTestScript);
- String flattenedSql = HiveSchemaTool.buildCommand(
- HiveSchemaHelper.getDbCommandParser("mysql"),
- testScriptFile.getParentFile().getPath(), testScriptFile.getName());
+ String flattenedSql = HiveSchemaHelper.getDbCommandParser("mysql")
+ .buildCommand(testScriptFile.getParentFile().getPath(),
+ testScriptFile.getName());
assertFalse(flattenedSql.contains("RUN"));
assertFalse(flattenedSql.contains("comment"));
assertTrue(flattenedSql.contains(childTab1));
@@ -282,8 +320,8 @@ public class TestSchemaTool extends Test
System.getProperty("line.separator");
File testScriptFile = generateTestScript(testScript);
NestedScriptParser testDbParser = HiveSchemaHelper.getDbCommandParser("mysql");
- String flattenedSql = HiveSchemaTool.buildCommand(testDbParser,
- testScriptFile.getParentFile().getPath(), testScriptFile.getName());
+ String flattenedSql = testDbParser.buildCommand(testScriptFile.getParentFile().getPath(),
+ testScriptFile.getName());
assertEquals(expectedSQL, flattenedSql);
}
@@ -317,8 +355,8 @@ public class TestSchemaTool extends Test
System.getProperty("line.separator");
File testScriptFile = generateTestScript(testScript);
NestedScriptParser testDbParser = HiveSchemaHelper.getDbCommandParser("mysql");
- String flattenedSql = HiveSchemaTool.buildCommand(testDbParser,
- testScriptFile.getParentFile().getPath(), testScriptFile.getName());
+ String flattenedSql = testDbParser.buildCommand(testScriptFile.getParentFile().getPath(),
+ testScriptFile.getName());
assertEquals(expectedSQL, flattenedSql);
}
@@ -358,9 +396,9 @@ public class TestSchemaTool extends Test
};
File testScriptFile = generateTestScript(parentTestScript);
- String flattenedSql = HiveSchemaTool.buildCommand(
- HiveSchemaHelper.getDbCommandParser("oracle"),
- testScriptFile.getParentFile().getPath(), testScriptFile.getName());
+ String flattenedSql = HiveSchemaHelper.getDbCommandParser("oracle")
+ .buildCommand(testScriptFile.getParentFile().getPath(),
+ testScriptFile.getName());
assertFalse(flattenedSql.contains("@"));
assertFalse(flattenedSql.contains("comment"));
assertTrue(flattenedSql.contains(childTab1));
@@ -380,4 +418,21 @@ public class TestSchemaTool extends Test
out.close();
return testScriptFile;
}
-}
+
+ /**
+ * Write out a dummy pre-upgrade script with given SQL statement.
+ */
+ private String writeDummyPreUpgradeScript(int index, String upgradeScriptName,
+ String sql) throws Exception {
+ String preUpgradeScript = "pre-" + index + "-" + upgradeScriptName;
+ String dummyPreScriptPath = System.getProperty("test.tmp.dir", "target/tmp") +
+ File.separatorChar + "scripts" + File.separatorChar + "metastore" +
+ File.separatorChar + "upgrade" + File.separatorChar + "derby" +
+ File.separatorChar + preUpgradeScript;
+ FileWriter fstream = new FileWriter(dummyPreScriptPath);
+ BufferedWriter out = new BufferedWriter(fstream);
+ out.write(sql + System.getProperty("line.separator") + ";");
+ out.close();
+ return preUpgradeScript;
+ }
+}
\ No newline at end of file
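The pre-upgrade-script assertions above depend on capturing System.out and System.err; the capture-and-restore pattern, extracted as a standalone sketch (tearDown() performs the same restore):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    public class CaptureStdoutSketch {
      public static void main(String[] args) {
        PrintStream originalOut = System.out;
        ByteArrayOutputStream captured = new ByteArrayOutputStream();
        System.setOut(new PrintStream(captured));
        try {
          System.out.println("pre-0-upgrade-0.12.0-to-0.13.0.derby.sql"); // lands in the buffer
        } finally {
          System.setOut(originalOut); // always restore, or later tests print into the buffer
        }
        originalOut.println(captured.toString().contains("derby.sql")); // prints: true
      }
    }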
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Thu Oct 30 16:22:33 2014
@@ -39,6 +39,7 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
+import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -417,50 +418,18 @@ public class TestJdbcDriver2 {
///////////////////////////////////////////////
//////////////////// correct testcase
+ //////////////////// executed twice: once with the typed ps setters, once with the generic setObject
//////////////////////////////////////////////
try {
- PreparedStatement ps = con.prepareStatement(sql);
-
- ps.setBoolean(1, true);
- ps.setBoolean(2, true);
-
- ps.setShort(3, Short.valueOf("1"));
- ps.setInt(4, 2);
- ps.setFloat(5, 3f);
- ps.setDouble(6, Double.valueOf(4));
- ps.setString(7, "test'string\"");
- ps.setLong(8, 5L);
- ps.setByte(9, (byte) 1);
- ps.setByte(10, (byte) 1);
- ps.setString(11, "2012-01-01");
-
- ps.setMaxRows(2);
-
- assertTrue(true);
-
+ PreparedStatement ps = createPreparedStatementUsingSetXXX(sql);
ResultSet res = ps.executeQuery();
- assertNotNull(res);
-
- while (res.next()) {
- assertEquals("2011-03-25", res.getString("ddate"));
- assertEquals("10", res.getString("num"));
- assertEquals((byte) 10, res.getByte("num"));
- assertEquals("2011-03-25", res.getDate("ddate").toString());
- assertEquals(Double.valueOf(10).doubleValue(), res.getDouble("num"), 0.1);
- assertEquals(10, res.getInt("num"));
- assertEquals(Short.valueOf("10").shortValue(), res.getShort("num"));
- assertEquals(10L, res.getLong("num"));
- assertEquals(true, res.getBoolean("bv"));
- Object o = res.getObject("ddate");
- assertNotNull(o);
- o = res.getObject("num");
- assertNotNull(o);
- }
- res.close();
- assertTrue(true);
+ assertPreparedStatementResultAsExpected(res);
+ ps.close();
+ ps = createPreparedStatementUsingSetObject(sql);
+ res = ps.executeQuery();
+ assertPreparedStatementResultAsExpected(res);
ps.close();
- assertTrue(true);
} catch (Exception e) {
e.printStackTrace();
@@ -515,6 +484,82 @@ public class TestJdbcDriver2 {
assertNotNull(
"Execute the invalid setted sql statement should throw exception",
expectedException);
+
+ // setObject to the yet unknown type java.util.Date
+ expectedException = null;
+ try {
+ PreparedStatement ps = con.prepareStatement(sql);
+ ps.setObject(1, new Date());
+ ps.executeQuery();
+ } catch (Exception e) {
+ expectedException = e;
+ }
+ assertNotNull(
+ "Setting to an unknown type should throw an exception",
+ expectedException);
+
+ }
+
+ private PreparedStatement createPreparedStatementUsingSetObject(String sql) throws SQLException {
+ PreparedStatement ps = con.prepareStatement(sql);
+
+ ps.setObject(1, true); //setBoolean
+ ps.setObject(2, true); //setBoolean
+
+ ps.setObject(3, Short.valueOf("1")); //setShort
+ ps.setObject(4, 2); //setInt
+ ps.setObject(5, 3f); //setFloat
+ ps.setObject(6, Double.valueOf(4)); //setDouble
+ ps.setObject(7, "test'string\""); //setString
+ ps.setObject(8, 5L); //setLong
+ ps.setObject(9, (byte) 1); //setByte
+ ps.setObject(10, (byte) 1); //setByte
+ ps.setString(11, "2012-01-01"); //setString
+
+ ps.setMaxRows(2);
+ return ps;
+ }
+
+ private PreparedStatement createPreparedStatementUsingSetXXX(String sql) throws SQLException {
+ PreparedStatement ps = con.prepareStatement(sql);
+
+ ps.setBoolean(1, true); //setBoolean
+ ps.setBoolean(2, true); //setBoolean
+
+ ps.setShort(3, Short.valueOf("1")); //setShort
+ ps.setInt(4, 2); //setInt
+ ps.setFloat(5, 3f); //setFloat
+ ps.setDouble(6, Double.valueOf(4)); //setDouble
+ ps.setString(7, "test'string\""); //setString
+ ps.setLong(8, 5L); //setLong
+ ps.setByte(9, (byte) 1); //setByte
+ ps.setByte(10, (byte) 1); //setByte
+ ps.setString(11, "2012-01-01"); //setString
+
+ ps.setMaxRows(2);
+ return ps;
+ }
+
+ private void assertPreparedStatementResultAsExpected(ResultSet res ) throws SQLException {
+ assertNotNull(res);
+
+ while (res.next()) {
+ assertEquals("2011-03-25", res.getString("ddate"));
+ assertEquals("10", res.getString("num"));
+ assertEquals((byte) 10, res.getByte("num"));
+ assertEquals("2011-03-25", res.getDate("ddate").toString());
+ assertEquals(Double.valueOf(10).doubleValue(), res.getDouble("num"), 0.1);
+ assertEquals(10, res.getInt("num"));
+ assertEquals(Short.valueOf("10").shortValue(), res.getShort("num"));
+ assertEquals(10L, res.getLong("num"));
+ assertEquals(true, res.getBoolean("bv"));
+ Object o = res.getObject("ddate");
+ assertNotNull(o);
+ o = res.getObject("num");
+ assertNotNull(o);
+ }
+ res.close();
+ assertTrue(true);
}
/**
@@ -1682,29 +1727,24 @@ public class TestJdbcDriver2 {
}
private static final String[][] HTTP_URL_PROPERTIES = new String[][] {
- {"jdbc:hive2://server:10002/db;" +
- "user=foo;password=bar?" +
- "hive.server2.transport.mode=http;" +
- "hive.server2.thrift.http.path=hs2", "server", "10002", "db", "http", "hs2"},
- {"jdbc:hive2://server:10000/testdb;" +
- "user=foo;password=bar?" +
- "hive.server2.transport.mode=binary;" +
- "hive.server2.thrift.http.path=", "server", "10000", "testdb", "binary", ""},
- };
-
- @Test
- public void testParseUrlHttpMode() throws SQLException, JdbcUriParseException,
- ZooKeeperHiveClientException {
- new HiveDriver();
- for (String[] testValues : HTTP_URL_PROPERTIES) {
- JdbcConnectionParams params = Utils.parseURL(testValues[0]);
- assertEquals(params.getHost(), testValues[1]);
- assertEquals(params.getPort(), Integer.parseInt(testValues[2]));
- assertEquals(params.getDbName(), testValues[3]);
- assertEquals(params.getHiveConfs().get("hive.server2.transport.mode"), testValues[4]);
- assertEquals(params.getHiveConfs().get("hive.server2.thrift.http.path"), testValues[5]);
- }
+ { "jdbc:hive2://server:10002/db;user=foo;password=bar;transportMode=http;httpPath=hs2",
+ "server", "10002", "db", "http", "hs2" },
+ { "jdbc:hive2://server:10000/testdb;user=foo;password=bar;transportMode=binary;httpPath=",
+ "server", "10000", "testdb", "binary", "" }, };
+
+  @Test
+  public void testParseUrlHttpMode() throws SQLException, JdbcUriParseException,
+      ZooKeeperHiveClientException {
+    new HiveDriver();
+    for (String[] testValues : HTTP_URL_PROPERTIES) {
+      JdbcConnectionParams params = Utils.parseURL(testValues[0]);
+      assertEquals(params.getHost(), testValues[1]);
+      assertEquals(params.getPort(), Integer.parseInt(testValues[2]));
+      assertEquals(params.getDbName(), testValues[3]);
+      assertEquals(params.getSessionVars().get("transportMode"), testValues[4]);
+      assertEquals(params.getSessionVars().get("httpPath"), testValues[5]);
+    }
+  }
private static void assertDpi(DriverPropertyInfo dpi, String name,
String value) {
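The rewritten fixture above reflects the URL scheme this change standardizes on: transportMode and httpPath travel as semicolon-separated session variables, rather than as hive.server2.* conf values after a "?". A minimal connection sketch in the new style (host, port, and credentials are illustrative assumptions, not values from any test environment):

    // Sketch only: open a connection using the session-variable form of
    // transportMode/httpPath exercised by testParseUrlHttpMode above.
    Connection con = DriverManager.getConnection(
        "jdbc:hive2://server:10002/db;transportMode=http;httpPath=hs2",
        "foo", "bar");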
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java Thu Oct 30 16:22:33 2014
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertEqu
import static org.junit.Assert.fail;
import java.io.File;
+import java.net.URLEncoder;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
@@ -55,8 +56,8 @@ public class TestSSL {
private Connection hs2Conn = null;
private String dataFileDir = conf.get("test.data.files");
private Map<String, String> confOverlay;
- private final String SSL_CONN_PARAMS = ";ssl=true;sslTrustStore=" + dataFileDir + File.separator +
- TRUST_STORE_NAME + ";trustStorePassword=" + KEY_STORE_PASSWORD;
+ private final String SSL_CONN_PARAMS = ";ssl=true;sslTrustStore=" + URLEncoder.encode(dataFileDir + File.separator +
+ TRUST_STORE_NAME) + ";trustStorePassword=" + KEY_STORE_PASSWORD;
@BeforeClass
public static void beforeTest() throws Exception {
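The URLEncoder.encode wrapper added above keeps separator characters in the trust-store path (spaces, semicolons) from being misread as JDBC URL syntax. Note that the single-argument encode(String) is deprecated because it relies on the platform default charset; a sketch of the two-argument form over the same fields:

    // Equivalent encoding with an explicit charset; "UTF-8" is always
    // supported, so the declared UnsupportedEncodingException never fires.
    String encodedStorePath = URLEncoder.encode(
        dataFileDir + File.separator + TRUST_STORE_NAME, "UTF-8");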
Modified: hive/branches/spark/itests/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/pom.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/pom.xml (original)
+++ hive/branches/spark/itests/pom.xml Thu Oct 30 16:22:33 2014
@@ -19,7 +19,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
- <version>0.14.0-SNAPSHOT</version>
+ <version>0.15.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
Modified: hive/branches/spark/itests/qtest/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/qtest/pom.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/qtest/pom.xml (original)
+++ hive/branches/spark/itests/qtest/pom.xml Thu Oct 30 16:22:33 2014
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive-it</artifactId>
- <version>0.14.0-SNAPSHOT</version>
+ <version>0.15.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
Modified: hive/branches/spark/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/src/test/resources/testconfiguration.properties?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/spark/itests/src/test/resources/testconfiguration.properties Thu Oct 30 16:22:33 2014
@@ -127,6 +127,7 @@ minitez.query.files.shared=alter_merge_2
script_pipe.q,\
scriptfile1.q,\
select_dummy_source.q,\
+ skewjoin.q,\
stats_counter.q,\
stats_counter_partitioned.q,\
stats_noscan_1.q,\
@@ -156,6 +157,7 @@ minitez.query.files.shared=alter_merge_2
update_where_partitioned.q,\
update_two_cols.q,\
vector_between_in.q,\
+ vector_bucket.q,\
vector_cast_constant.q,\
vector_char_4.q,\
vector_char_simple.q,\
@@ -165,10 +167,13 @@ minitez.query.files.shared=alter_merge_2
vector_distinct_2.q,\
vector_elt.q,\
vector_groupby_3.q,\
+ vector_groupby_reduce.q,\
vector_left_outer_join.q,\
vector_mapjoin_reduce.q,\
vector_non_string_partition.q,\
vector_orderby_5.q,\
+ vector_partitioned_date_time.q,\
+ vector_reduce_groupby_decimal.q,\
vector_string_concat.q,\
vector_varchar_4.q,\
vector_varchar_simple.q,\
@@ -243,6 +248,7 @@ minitez.query.files=bucket_map_join_tez1
tez_union_decimal.q,\
tez_union_group_by.q,\
tez_smb_main.q,\
+ tez_smb_1.q,\
vectorized_dynamic_partition_pruning.q
beeline.positive.exclude=add_part_exist.q,\
Modified: hive/branches/spark/itests/test-serde/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/test-serde/pom.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/test-serde/pom.xml (original)
+++ hive/branches/spark/itests/test-serde/pom.xml Thu Oct 30 16:22:33 2014
@@ -19,7 +19,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive-it</artifactId>
- <version>0.14.0-SNAPSHOT</version>
+ <version>0.15.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
Modified: hive/branches/spark/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java (original)
+++ hive/branches/spark/itests/test-serde/src/main/java/org/apache/hadoop/hive/serde2/TestSerDe.java Thu Oct 30 16:22:33 2014
@@ -27,6 +27,7 @@ import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.MetadataListStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -44,10 +45,17 @@ import com.google.common.collect.Lists;
* TestSerDe.
*
*/
+@SerDeSpec(schemaProps = {
+ serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+ TestSerDe.COLUMNS, TestSerDe.COLUMNS_COMMENTS, TestSerDe.DEFAULT_SERIALIZATION_FORMAT})
public class TestSerDe extends AbstractSerDe {
public static final Log LOG = LogFactory.getLog(TestSerDe.class.getName());
+ public static final String COLUMNS = "columns";
+ public static final String COLUMNS_COMMENTS = "columns.comments";
+ public static final String DEFAULT_SERIALIZATION_FORMAT = "testserde.default.serialization.format";
+
public String getShortName() {
return shortName();
}
@@ -76,7 +84,7 @@ public class TestSerDe extends AbstractS
@Override
public void initialize(Configuration job, Properties tbl) throws SerDeException {
separator = DefaultSeparator;
- String altSep = tbl.getProperty("testserde.default.serialization.format");
+ String altSep = tbl.getProperty(DEFAULT_SERIALIZATION_FORMAT);
if (altSep != null && altSep.length() > 0) {
try {
byte[] b = new byte[1];
@@ -87,7 +95,7 @@ public class TestSerDe extends AbstractS
}
}
- String columnProperty = tbl.getProperty("columns");
+ String columnProperty = tbl.getProperty(COLUMNS);
if (columnProperty == null || columnProperty.length() == 0) {
// Hack for tables with no columns
// Treat it as a table with a single column called "col"
@@ -97,7 +105,7 @@ public class TestSerDe extends AbstractS
} else {
columnNames = Arrays.asList(columnProperty.split(","));
cachedObjectInspector = MetadataListStructObjectInspector
- .getInstance(columnNames,Lists.newArrayList(Splitter.on('\0').split(tbl.getProperty("columns.comments"))));
+ .getInstance(columnNames,Lists.newArrayList(Splitter.on('\0').split(tbl.getProperty(COLUMNS_COMMENTS))));
}
LOG.info(getClass().getName() + ": initialized with columnNames: "
+ columnNames);
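With the property keys promoted to public constants, callers can configure the serde without hard-coded strings. A hypothetical initialization sketch (column names, comments, and the separator code are assumptions for illustration; initialize(...) throws SerDeException):

    // Sketch only: configure TestSerDe from a Properties bag using the
    // new constants instead of raw "columns"/"columns.comments" literals.
    Properties tbl = new Properties();
    tbl.setProperty(TestSerDe.COLUMNS, "key,value");
    tbl.setProperty(TestSerDe.COLUMNS_COMMENTS, "key comment\0value comment");
    tbl.setProperty(TestSerDe.DEFAULT_SERIALIZATION_FORMAT, "9"); // assumed numeric byte code; 9 = tab
    TestSerDe serde = new TestSerDe();
    serde.initialize(new Configuration(), tbl);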
Modified: hive/branches/spark/itests/util/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/pom.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/util/pom.xml (original)
+++ hive/branches/spark/itests/util/pom.xml Thu Oct 30 16:22:33 2014
@@ -19,7 +19,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive-it</artifactId>
- <version>0.14.0-SNAPSHOT</version>
+ <version>0.15.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Thu Oct 30 16:22:33 2014
@@ -305,7 +305,7 @@ public class QTestUtil {
this.outDir = outDir;
this.logDir = logDir;
if (confDir != null && !confDir.isEmpty()) {
- HiveConf.setHiveSiteLocation(new URL("file://"+confDir+"/hive-site.xml"));
+ HiveConf.setHiveSiteLocation(new URL("file://"+ new File(confDir).toURI().getPath() + "/hive-site.xml"));
System.out.println("Setting hive-site: "+HiveConf.getHiveSiteLocation());
}
conf = new HiveConf(Driver.class);
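The File/URI round-trip above matters mostly on Windows, where a raw confDir would otherwise yield a malformed file:// URL. A small sketch under that assumption (the path is hypothetical, and new URL(...) throws MalformedURLException, assumed handled by the caller):

    // On Windows, toURI().getPath() turns C:\conf into /C:/conf, which is
    // safe to embed in a file:// URL.
    String confDir = "C:\\conf";
    String normalized = new File(confDir).toURI().getPath(); // "/C:/conf"
    URL hiveSite = new URL("file://" + normalized + "/hive-site.xml");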
@@ -559,7 +559,7 @@ public class QTestUtil {
List<Index> indexes = db.getIndexes(dbName, tblName, (short)-1);
if (indexes != null && indexes.size() > 0) {
for (Index index : indexes) {
- db.dropIndex(dbName, tblName, index.getIndexName(), true);
+ db.dropIndex(dbName, tblName, index.getIndexName(), true, true);
}
}
}
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessControllerForTest.java Thu Oct 30 16:22:33 2014
@@ -39,12 +39,11 @@ public class SQLStdHiveAccessControllerF
@Override
- public void applyAuthorizationConfigPolicy(HiveConf hiveConf) {
+ public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
super.applyAuthorizationConfigPolicy(hiveConf);
// remove restrictions on the variables that can be set using set command
- hiveConf.setIsModWhiteListEnabled(false);
-
+ hiveConf.setModifiableWhiteListRegex(".*");
}
}
Modified: hive/branches/spark/jdbc/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/pom.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/jdbc/pom.xml (original)
+++ hive/branches/spark/jdbc/pom.xml Thu Oct 30 16:22:33 2014
@@ -19,7 +19,7 @@
<parent>
<groupId>org.apache.hive</groupId>
<artifactId>hive</artifactId>
- <version>0.14.0-SNAPSHOT</version>
+ <version>0.15.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
Modified: hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Thu Oct 30 16:22:33 2014
@@ -218,7 +218,7 @@ public class HiveConnection implements j
String schemeName = useSsl ? "https" : "http";
// http path should begin with "/"
String httpPath;
- httpPath = hiveConfMap.get(JdbcConnectionParams.HTTP_PATH);
+ httpPath = sessConfMap.get(JdbcConnectionParams.HTTP_PATH);
if (httpPath == null) {
httpPath = "/";
} else if (!httpPath.startsWith("/")) {
@@ -258,15 +258,12 @@ public class HiveConnection implements j
HttpRequestInterceptor requestInterceptor;
// If Kerberos
if (isKerberosAuthMode()) {
- if (useSsl) {
- String msg = "SSL encryption is currently not supported with " +
- "kerberos authentication";
- throw new SQLException(msg, " 08S01");
- }
/**
* Add an interceptor which sets the appropriate header in the request.
* It performs the kerberos authentication and gets the final service ticket,
* which is sent to the server with every request.
+       * In https mode, all of this information is sent encrypted.
+       * TODO: Optimize this with a mix of kerberos and cookie-based authentication.
*/
requestInterceptor = new HttpKerberosRequestInterceptor(
sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host, getServerHttpUrl(false));
@@ -277,46 +274,46 @@ public class HiveConnection implements j
* In https mode, all of this information is sent encrypted.
*/
requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword());
- // Configure httpClient for SSL
- if (useSsl) {
- String sslTrustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
- String sslTrustStorePassword = sessConfMap.get(
- JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
- KeyStore sslTrustStore;
- SSLSocketFactory socketFactory;
- /**
- * The code within the try block throws:
- * 1. SSLInitializationException
- * 2. KeyStoreException
- * 3. IOException
- * 4. NoSuchAlgorithmException
- * 5. CertificateException
- * 6. KeyManagementException
- * 7. UnrecoverableKeyException
- * We don't want the client to retry on any of these, hence we catch all
- * and throw a SQLException.
- */
- try {
- if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) {
- // Create a default socket factory based on standard JSSE trust material
- socketFactory = SSLSocketFactory.getSocketFactory();
- }
- else {
- // Pick trust store config from the given path
- sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
- sslTrustStore.load(new FileInputStream(sslTrustStorePath),
- sslTrustStorePassword.toCharArray());
- socketFactory = new SSLSocketFactory(sslTrustStore);
- }
- socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
- Scheme sslScheme = new Scheme("https", 443, socketFactory);
- httpClient.getConnectionManager().getSchemeRegistry().register(sslScheme);
+ }
+ // Configure httpClient for SSL
+ if (useSsl) {
+ String sslTrustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
+ String sslTrustStorePassword = sessConfMap.get(
+ JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
+ KeyStore sslTrustStore;
+ SSLSocketFactory socketFactory;
+ /**
+ * The code within the try block throws:
+ * 1. SSLInitializationException
+ * 2. KeyStoreException
+ * 3. IOException
+ * 4. NoSuchAlgorithmException
+ * 5. CertificateException
+ * 6. KeyManagementException
+ * 7. UnrecoverableKeyException
+ * We don't want the client to retry on any of these, hence we catch all
+ * and throw a SQLException.
+ */
+ try {
+ if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) {
+ // Create a default socket factory based on standard JSSE trust material
+ socketFactory = SSLSocketFactory.getSocketFactory();
}
- catch (Exception e) {
- String msg = "Could not create an https connection to " +
- jdbcUriString + ". " + e.getMessage();
- throw new SQLException(msg, " 08S01", e);
+ else {
+ // Pick trust store config from the given path
+ sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
+ sslTrustStore.load(new FileInputStream(sslTrustStorePath),
+ sslTrustStorePassword.toCharArray());
+ socketFactory = new SSLSocketFactory(sslTrustStore);
}
+ socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
+ Scheme sslScheme = new Scheme("https", 443, socketFactory);
+ httpClient.getConnectionManager().getSchemeRegistry().register(sslScheme);
+ }
+ catch (Exception e) {
+ String msg = "Could not create an https connection to " +
+ jdbcUriString + ". " + e.getMessage();
+ throw new SQLException(msg, " 08S01", e);
}
}
httpClient.addRequestInterceptor(requestInterceptor);
@@ -443,6 +440,12 @@ public class HiveConnection implements j
}
openReq.setConfiguration(openConf);
+    // Store the user name in the open request in the case of non-SASL (plain) authentication
+ if (JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
+ openReq.setUsername(sessConfMap.get(JdbcConnectionParams.AUTH_USER));
+ openReq.setPassword(sessConfMap.get(JdbcConnectionParams.AUTH_PASSWD));
+ }
+
try {
TOpenSessionResp openResp = client.OpenSession(openReq);
@@ -485,7 +488,7 @@ public class HiveConnection implements j
}
private boolean isHttpTransportMode() {
- String transportMode = hiveConfMap.get(JdbcConnectionParams.TRANSPORT_MODE);
+ String transportMode = sessConfMap.get(JdbcConnectionParams.TRANSPORT_MODE);
if(transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
return true;
}
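With the SSL-plus-Kerberos rejection removed above, a client may combine both over the http transport. A hypothetical connection URL (principal, host, and trust-store values are assumptions; only the parameter names come from this code):

    // Sketch only: Kerberos authentication over an SSL-encrypted HTTP path,
    // a combination the removed check used to reject with SQLState 08S01.
    String url = "jdbc:hive2://server:10001/default;ssl=true"
        + ";sslTrustStore=/path/to/truststore.jks;trustStorePassword=pw"
        + ";transportMode=http;httpPath=cliservice"
        + ";principal=hive/server@EXAMPLE.COM";
    Connection con = DriverManager.getConnection(url);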
Modified: hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java Thu Oct 30 16:22:33 2014
@@ -37,6 +37,8 @@ import java.sql.SQLException;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
+import java.sql.Types;
+import java.text.MessageFormat;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Scanner;
@@ -564,8 +566,7 @@ public class HivePreparedStatement exten
*/
public void setNull(int parameterIndex, int sqlType) throws SQLException {
- // TODO Auto-generated method stub
- throw new SQLException("Method not supported");
+ this.parameters.put(parameterIndex, "NULL");
}
/*
@@ -575,8 +576,7 @@ public class HivePreparedStatement exten
*/
public void setNull(int paramIndex, int sqlType, String typeName) throws SQLException {
- // TODO Auto-generated method stub
- throw new SQLException("Method not supported");
+ this.parameters.put(paramIndex, "NULL");
}
/*
@@ -586,8 +586,38 @@ public class HivePreparedStatement exten
*/
public void setObject(int parameterIndex, Object x) throws SQLException {
- // TODO Auto-generated method stub
- throw new SQLException("Method not supported");
+ if (x == null) {
+ setNull(parameterIndex, Types.NULL);
+ } else if (x instanceof String) {
+ setString(parameterIndex, (String) x);
+ } else if (x instanceof Short) {
+ setShort(parameterIndex, ((Short) x).shortValue());
+ } else if (x instanceof Integer) {
+ setInt(parameterIndex, ((Integer) x).intValue());
+ } else if (x instanceof Long) {
+ setLong(parameterIndex, ((Long) x).longValue());
+ } else if (x instanceof Float) {
+ setFloat(parameterIndex, ((Float) x).floatValue());
+ } else if (x instanceof Double) {
+ setDouble(parameterIndex, ((Double) x).doubleValue());
+ } else if (x instanceof Boolean) {
+ setBoolean(parameterIndex, ((Boolean) x).booleanValue());
+ } else if (x instanceof Byte) {
+ setByte(parameterIndex, ((Byte) x).byteValue());
+ } else if (x instanceof Character) {
+ setString(parameterIndex, x.toString());
+ } else if (x instanceof Timestamp) {
+ setString(parameterIndex, x.toString());
+ } else if (x instanceof BigDecimal) {
+ setString(parameterIndex, x.toString());
+ } else {
+ // Can't infer a type.
+ throw new SQLException(
+ MessageFormat
+ .format(
+ "Can''t infer the SQL type to use for an instance of {0}. Use setObject() with an explicit Types value to specify the type to use.",
+ x.getClass().getName()));
+ }
}
/*