Posted to commits@hive.apache.org by br...@apache.org on 2014/10/06 06:00:54 UTC
svn commit: r1629563 [4/33] - in /hive/branches/spark: ./ accumulo-handler/
beeline/ beeline/src/java/org/apache/hive/beeline/ bin/ bin/ext/ common/
common/src/java/org/apache/hadoop/hive/conf/
common/src/test/org/apache/hadoop/hive/common/type/ contri...
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java Mon Oct 6 04:00:39 2014
@@ -171,41 +171,36 @@ public class TestAuthorizationPreEventLi
driver.run("create database " + dbName);
listSize++;
- Database db = msc.getDatabase(dbName);
-
Database dbFromEvent = (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls,
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);
+ Database db = msc.getDatabase(dbName);
validateCreateDb(db,dbFromEvent);
driver.run("use " + dbName);
driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
- listSize++;
- Table tbl = msc.getTable(dbName, tblName);
+ listSize = authCalls.size();
Table tblFromEvent = (
(org.apache.hadoop.hive.ql.metadata.Table)
assertAndExtractSingleObjectFromEvent(listSize, authCalls,
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE))
.getTTable();
+ Table tbl = msc.getTable(dbName, tblName);
validateCreateTable(tbl, tblFromEvent);
driver.run("alter table tmptbl add partition (b='2011')");
- listSize++;
- Partition part = msc.getPartition("hive3705", "tmptbl", "b=2011");
+ listSize = authCalls.size();
Partition ptnFromEvent = (
(org.apache.hadoop.hive.ql.metadata.Partition)
assertAndExtractSingleObjectFromEvent(listSize, authCalls,
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION))
.getTPartition();
+ Partition part = msc.getPartition("hive3705", "tmptbl", "b=2011");
validateAddPartition(part,ptnFromEvent);
driver.run(String.format("alter table %s touch partition (%s)", tblName, "b='2011'"));
- listSize++;
-
- //the partition did not change,
- // so the new partition should be similar to the original partition
- Partition modifiedP = msc.getPartition(dbName, tblName, "b=2011");
+ listSize = authCalls.size();
Partition ptnFromEventAfterAlter = (
(org.apache.hadoop.hive.ql.metadata.Partition)
@@ -213,6 +208,9 @@ public class TestAuthorizationPreEventLi
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.PARTITION))
.getTPartition();
+ //the partition did not change,
+ // so the new partition should be similar to the original partition
+ Partition modifiedP = msc.getPartition(dbName, tblName, "b=2011");
validateAlterPartition(part, modifiedP, ptnFromEventAfterAlter.getDbName(),
ptnFromEventAfterAlter.getTableName(), ptnFromEventAfterAlter.getValues(),
ptnFromEventAfterAlter);
@@ -220,8 +218,9 @@ public class TestAuthorizationPreEventLi
List<String> part_vals = new ArrayList<String>();
part_vals.add("c=2012");
- Partition newPart = msc.appendPartition(dbName, tblName, part_vals);
+ listSize = authCalls.size();
+ Partition newPart = msc.appendPartition(dbName, tblName, part_vals);
listSize++;
Partition newPtnFromEvent = (
@@ -233,25 +232,23 @@ public class TestAuthorizationPreEventLi
driver.run(String.format("alter table %s rename to %s", tblName, renamed));
- listSize++;
+ listSize = authCalls.size();
- Table renamedTable = msc.getTable(dbName, renamed);
Table renamedTableFromEvent = (
(org.apache.hadoop.hive.ql.metadata.Table)
assertAndExtractSingleObjectFromEvent(listSize, authCalls,
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE))
.getTTable();
+ Table renamedTable = msc.getTable(dbName, renamed);
validateAlterTable(tbl, renamedTable, renamedTableFromEvent,
renamedTable);
assertFalse(tbl.getTableName().equals(renamedTable.getTableName()));
//change the table name back
driver.run(String.format("alter table %s rename to %s", renamed, tblName));
- listSize++;
-
driver.run(String.format("alter table %s drop partition (b='2011')", tblName));
- listSize++;
+ listSize = authCalls.size();
Partition ptnFromDropPartition = (
(org.apache.hadoop.hive.ql.metadata.Partition)
@@ -262,7 +259,7 @@ public class TestAuthorizationPreEventLi
validateDropPartition(modifiedP, ptnFromDropPartition);
driver.run("drop table " + tblName);
- listSize++;
+ listSize = authCalls.size();
Table tableFromDropTableEvent = (
(org.apache.hadoop.hive.ql.metadata.Table)
assertAndExtractSingleObjectFromEvent(listSize, authCalls,
@@ -290,16 +287,16 @@ public class TestAuthorizationPreEventLi
}
tCustom.setTableName(tbl.getTableName() + "_custom");
+ listSize = authCalls.size();
msc.createTable(tCustom);
listSize++;
- Table customCreatedTable = msc.getTable(tCustom.getDbName(), tCustom.getTableName());
Table customCreatedTableFromEvent = (
(org.apache.hadoop.hive.ql.metadata.Table)
assertAndExtractSingleObjectFromEvent(listSize, authCalls,
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.TABLE))
.getTTable();
-
+ Table customCreatedTable = msc.getTable(tCustom.getDbName(), tCustom.getTableName());
validateCreateTable(tCustom,customCreatedTable);
validateCreateTable(tCustom,customCreatedTableFromEvent);
@@ -316,8 +313,10 @@ public class TestAuthorizationPreEventLi
assertEquals(tCustom.getSd().getSerdeInfo().getSerializationLib(),
customCreatedTableFromEvent.getSd().getSerdeInfo().getSerializationLib());
- msc.dropTable(tCustom.getDbName(),tCustom.getTableName());
- listSize++;
+ listSize = authCalls.size();
+ msc.dropTable(tCustom.getDbName(), tCustom.getTableName());
+ listSize += 2;
+
Table table2FromDropTableEvent = (
(org.apache.hadoop.hive.ql.metadata.Table)
assertAndExtractSingleObjectFromEvent(listSize, authCalls,
@@ -327,7 +326,7 @@ public class TestAuthorizationPreEventLi
validateDropTable(tCustom, table2FromDropTableEvent);
driver.run("drop database " + dbName);
- listSize++;
+ listSize = authCalls.size();
Database dbFromDropDatabaseEvent =
(Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls,
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);
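The reorderings in this file all follow one pattern: with metastore-side read authorization enabled, client reads such as msc.getDatabase() and msc.getTable() now generate auth calls of their own, so each test takes its listSize snapshot around the DDL and performs the metastore read only after the event has been extracted. A minimal sketch of the pattern (extractTableFromEvent stands in for assertAndExtractSingleObjectFromEvent; names are illustrative):

    // Run the DDL, snapshot the auth-call count, assert the event, and only
    // then read from the metastore -- reading earlier would add auth calls
    // and shift the expected index.
    driver.run("create table t (a string)");
    int listSize = authCalls.size();
    Table fromEvent = extractTableFromEvent(listSize, authCalls);  // illustrative helper
    Table fromStore = msc.getTable(dbName, "t");                   // read after the assert
    validateCreateTable(fromStore, fromEvent);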
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java Mon Oct 6 04:00:39 2014
@@ -89,6 +89,7 @@ public class TestMetastoreAuthorizationP
AuthorizationPreEventListener.class.getName());
System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
getAuthorizationProvider());
+ setupMetaStoreReadAuthorization();
System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
InjectableDummyAuthenticator.class.getName());
System.setProperty(HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS.varname, "");
@@ -115,6 +116,13 @@ public class TestMetastoreAuthorizationP
driver = new Driver(clientHiveConf);
}
+ protected void setupMetaStoreReadAuthorization() {
+ // Read authorization does not work with the default/legacy authorization mode.
+ // Granting select privilege on a database is a chicken-and-egg problem: the
+ // grant statement itself invokes get_database, which in turn needs the select privilege.
+ System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_AUTH_READS.varname, "false");
+ }
+
@Override
protected void tearDown() throws Exception {
super.tearDown();
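The new setupMetaStoreReadAuthorization() is a template-method hook: this base test disables hive.security.metastore.authorization.auth.reads because the legacy provider cannot bootstrap itself, while a provider that can authorize reads overrides the hook, as the storage-based test does later in this commit. A sketch of such an override (the subclass name is illustrative):

    // Illustrative subclass; the real override for the storage-based
    // provider appears further down in this commit.
    public class ReadAuthCapableProviderTest extends TestMetastoreAuthorizationProvider {
      @Override
      protected void setupMetaStoreReadAuthorization() {
        // This provider can check reads, so turn the checks on.
        System.setProperty(
            HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_AUTH_READS.varname, "true");
      }
    }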
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMultiAuthorizationPreEventListener.java Mon Oct 6 04:00:39 2014
@@ -88,6 +88,7 @@ public class TestMultiAuthorizationPreEv
// verify that the actual action also went through
Database db = msc.getDatabase(dbName);
+ listSize += 2; // one read-database auth call for each of the two authorization providers
Database dbFromEvent = (Database)assertAndExtractSingleObjectFromEvent(listSize, authCalls,
DummyHiveMetastoreAuthorizationProvider.AuthCallContextType.DB);
validateCreateDb(db,dbFromEvent);
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationDrops.java Mon Oct 6 04:00:39 2014
@@ -18,88 +18,19 @@
package org.apache.hadoop.hive.ql.security;
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.cli.CliSessionState;
-import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
-import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
-import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Assert;
+import org.junit.Test;
/**
* Test cases focusing on drop table permission checks
*/
-public class TestStorageBasedMetastoreAuthorizationDrops extends TestCase{
- protected HiveConf clientHiveConf;
- protected HiveMetaStoreClient msc;
- protected Driver driver;
- protected UserGroupInformation ugi;
- private static int objNum = 0;
-
- protected String getAuthorizationProvider(){
- return StorageBasedAuthorizationProvider.class.getName();
- }
-
- protected HiveConf createHiveConf() throws Exception {
- return new HiveConf(this.getClass());
- }
-
- @Override
- protected void setUp() throws Exception {
-
- super.setUp();
-
- int port = MetaStoreUtils.findFreePort();
-
- // Turn on metastore-side authorization
- System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname,
- AuthorizationPreEventListener.class.getName());
- System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
- getAuthorizationProvider());
- System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
- InjectableDummyAuthenticator.class.getName());
-
- MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
-
- clientHiveConf = createHiveConf();
-
- // Turn off client-side authorization
- clientHiveConf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED,false);
-
- clientHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
- clientHiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
- clientHiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-
- clientHiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
- clientHiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-
- ugi = ShimLoader.getHadoopShims().getUGIForConf(clientHiveConf);
-
- SessionState.start(new CliSessionState(clientHiveConf));
- msc = new HiveMetaStoreClient(clientHiveConf, null);
- driver = new Driver(clientHiveConf);
-
- setupFakeUser();
- InjectableDummyAuthenticator.injectMode(false);
- }
-
+public class TestStorageBasedMetastoreAuthorizationDrops extends StorageBasedMetastoreTestBase {
+ @Test
public void testDropDatabase() throws Exception {
dropDatabaseByOtherUser("-rwxrwxrwx", 0);
dropDatabaseByOtherUser("-rwxrwxrwt", 1);
@@ -111,12 +42,12 @@ public class TestStorageBasedMetastoreAu
* @param expectedRet - expected return code for drop by other user
* @throws Exception
*/
- private void dropDatabaseByOtherUser(String perm, int expectedRet) throws Exception {
+ public void dropDatabaseByOtherUser(String perm, int expectedRet) throws Exception {
String dbName = getTestDbName();
setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), perm);
CommandProcessorResponse resp = driver.run("create database " + dbName);
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
Database db = msc.getDatabase(dbName);
validateCreateDb(db, dbName);
@@ -124,10 +55,11 @@ public class TestStorageBasedMetastoreAu
resp = driver.run("drop database " + dbName);
- assertEquals(expectedRet, resp.getResponseCode());
+ Assert.assertEquals(expectedRet, resp.getResponseCode());
}
+ @Test
public void testDropTable() throws Exception {
dropTableByOtherUser("-rwxrwxrwx", 0);
dropTableByOtherUser("-rwxrwxrwt", 1);
@@ -138,13 +70,13 @@ public class TestStorageBasedMetastoreAu
* @param expectedRet expected return code on drop table
* @throws Exception
*/
- private void dropTableByOtherUser(String perm, int expectedRet) throws Exception {
+ public void dropTableByOtherUser(String perm, int expectedRet) throws Exception {
String dbName = getTestDbName();
String tblName = getTestTableName();
setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
CommandProcessorResponse resp = driver.run("create database " + dbName);
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
Database db = msc.getDatabase(dbName);
validateCreateDb(db, dbName);
@@ -152,18 +84,19 @@ public class TestStorageBasedMetastoreAu
String dbDotTable = dbName + "." + tblName;
resp = driver.run("create table " + dbDotTable + "(i int)");
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
InjectableDummyAuthenticator.injectMode(true);
resp = driver.run("drop table " + dbDotTable);
- assertEquals(expectedRet, resp.getResponseCode());
+ Assert.assertEquals(expectedRet, resp.getResponseCode());
}
/**
* Drop view should not be blocked by SBA, since a view has no location to drop.
* @throws Exception
*/
+ @Test
public void testDropView() throws Exception {
String dbName = getTestDbName();
String tblName = getTestTableName();
@@ -171,7 +104,7 @@ public class TestStorageBasedMetastoreAu
setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
CommandProcessorResponse resp = driver.run("create database " + dbName);
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
Database db = msc.getDatabase(dbName);
validateCreateDb(db, dbName);
@@ -179,20 +112,20 @@ public class TestStorageBasedMetastoreAu
String dbDotTable = dbName + "." + tblName;
resp = driver.run("create table " + dbDotTable + "(i int)");
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
String dbDotView = dbName + "." + viewName;
resp = driver.run("create view " + dbDotView + " as select * from " + dbDotTable);
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
resp = driver.run("drop view " + dbDotView);
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
resp = driver.run("drop table " + dbDotTable);
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
}
-
+ @Test
public void testDropPartition() throws Exception {
dropPartitionByOtherUser("-rwxrwxrwx", 0);
dropPartitionByOtherUser("-rwxrwxrwt", 1);
@@ -203,70 +136,29 @@ public class TestStorageBasedMetastoreAu
* @param expectedRet expected return code
* @throws Exception
*/
- private void dropPartitionByOtherUser(String perm, int expectedRet) throws Exception {
+ public void dropPartitionByOtherUser(String perm, int expectedRet) throws Exception {
String dbName = getTestDbName();
String tblName = getTestTableName();
setPermissions(clientHiveConf.getVar(ConfVars.METASTOREWAREHOUSE), "-rwxrwxrwx");
CommandProcessorResponse resp = driver.run("create database " + dbName);
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
Database db = msc.getDatabase(dbName);
validateCreateDb(db, dbName);
setPermissions(db.getLocationUri(), "-rwxrwxrwx");
String dbDotTable = dbName + "." + tblName;
resp = driver.run("create table " + dbDotTable + "(i int) partitioned by (b string)");
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
Table tab = msc.getTable(dbName, tblName);
setPermissions(tab.getSd().getLocation(), perm);
resp = driver.run("alter table " + dbDotTable + " add partition (b='2011')");
- assertEquals(0, resp.getResponseCode());
+ Assert.assertEquals(0, resp.getResponseCode());
InjectableDummyAuthenticator.injectMode(true);
resp = driver.run("alter table " + dbDotTable + " drop partition (b='2011')");
- assertEquals(expectedRet, resp.getResponseCode());
+ Assert.assertEquals(expectedRet, resp.getResponseCode());
}
- private void setupFakeUser() {
- String fakeUser = "mal";
- List<String> fakeGroupNames = new ArrayList<String>();
- fakeGroupNames.add("groupygroup");
-
- InjectableDummyAuthenticator.injectUserName(fakeUser);
- InjectableDummyAuthenticator.injectGroupNames(fakeGroupNames);
- }
-
- private String setupUser() {
- return ugi.getUserName();
- }
-
- private String getTestTableName() {
- return this.getClass().getSimpleName() + "tab" + ++objNum;
- }
-
- private String getTestDbName() {
- return this.getClass().getSimpleName() + "db" + ++objNum;
- }
-
- @Override
- protected void tearDown() throws Exception {
- super.tearDown();
- InjectableDummyAuthenticator.injectMode(false);
- }
-
- protected void setPermissions(String locn, String permissions) throws Exception {
- FileSystem fs = FileSystem.get(new URI(locn), clientHiveConf);
- fs.setPermission(new Path(locn), FsPermission.valueOf(permissions));
- }
-
- private void validateCreateDb(Database expectedDb, String dbName) {
- assertEquals(expectedDb.getName().toLowerCase(), dbName.toLowerCase());
- }
-
- private void validateCreateTable(Table expectedTable, String tblName, String dbName) {
- assertNotNull(expectedTable);
- assertEquals(expectedTable.getTableName().toLowerCase(),tblName.toLowerCase());
- assertEquals(expectedTable.getDbName().toLowerCase(),dbName.toLowerCase());
- }
}
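The setup, teardown, and helper code deleted above moves into the new shared base class StorageBasedMetastoreTestBase, which is added elsewhere in this revision. Its assumed shape, reconstructed from the members removed here (imports as in the original file):

    public abstract class StorageBasedMetastoreTestBase {
      protected HiveConf clientHiveConf;
      protected HiveMetaStoreClient msc;
      protected Driver driver;
      private static int objNum = 0;

      // Helpers as deleted above, now shared across the storage-based tests.
      protected void setPermissions(String locn, String permissions) throws Exception {
        FileSystem fs = FileSystem.get(new URI(locn), clientHiveConf);
        fs.setPermission(new Path(locn), FsPermission.valueOf(permissions));
      }

      protected String getTestDbName() {
        return this.getClass().getSimpleName() + "db" + ++objNum;
      }

      protected String getTestTableName() {
        return this.getClass().getSimpleName() + "tab" + ++objNum;
      }

      protected void validateCreateDb(Database expectedDb, String dbName) {
        Assert.assertEquals(expectedDb.getName().toLowerCase(), dbName.toLowerCase());
      }
    }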
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestStorageBasedMetastoreAuthorizationProvider.java Mon Oct 6 04:00:39 2014
@@ -23,6 +23,7 @@ import java.net.URI;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
@@ -102,4 +103,10 @@ public class TestStorageBasedMetastoreAu
return super.getTestTableName() + "_SBAP";
}
+ @Override
+ protected void setupMetaStoreReadAuthorization() {
+ // enable read authorization in metastore
+ System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_AUTH_READS.varname, "true");
+ }
+
}
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java Mon Oct 6 04:00:39 2014
@@ -98,7 +98,7 @@ public class TestHiveAuthorizerCheckInvo
runCmd("create database " + dbName);
// Need a separate table for ACID testing, since it has to be bucketed and marked transactional
runCmd("create table " + acidTableName + " (i int, j int) clustered by (i) into 2 buckets " +
- "stored as orc");
+ "stored as orc TBLPROPERTIES ('transactional'='true')");
}
private static void runCmd(String cmd) throws CommandNeedRetryException {
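The added TBLPROPERTIES clause reflects that bucketing plus ORC alone no longer makes a table ACID; it must also be explicitly marked transactional. The minimal DDL, in the same runCmd style the test uses (table name illustrative):

    runCmd("create table acid_tab (i int, j int) clustered by (i) into 2 buckets "
        + "stored as orc TBLPROPERTIES ('transactional'='true')");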
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hadoop/hive/thrift/TestHadoop20SAuthBridge.java Mon Oct 6 04:00:39 2014
@@ -47,6 +47,7 @@ import org.apache.hadoop.security.SaslRp
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.Token;
@@ -129,7 +130,7 @@ public class TestHadoop20SAuthBridge ext
}
builder.append("127.0.1.1,");
builder.append(InetAddress.getLocalHost().getCanonicalHostName());
- conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName),
+ conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName),
builder.toString());
}
@@ -292,7 +293,7 @@ public class TestHadoop20SAuthBridge ext
private void setGroupsInConf(String[] groupNames, String proxyUserName)
throws IOException {
conf.set(
- ProxyUsers.getProxySuperuserGroupConfKey(proxyUserName),
+ DefaultImpersonationProvider.getProxySuperuserGroupConfKey(proxyUserName),
StringUtils.join(",", Arrays.asList(groupNames)));
configureSuperUserIPAddresses(conf, proxyUserName);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
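Both hunks track a Hadoop API move: the helpers that build proxy-user configuration keys now live on DefaultImpersonationProvider instead of ProxyUsers. Assuming Hadoop's standard hadoop.proxyuser.<user>.hosts / .groups key layout, the calls compose like this:

    // Sketch of the relocated helpers (user name illustrative).
    String hostsKey  = DefaultImpersonationProvider.getProxySuperuserIpConfKey("hive");
    String groupsKey = DefaultImpersonationProvider.getProxySuperuserGroupConfKey("hive");
    conf.setStrings(hostsKey, "127.0.0.1,localhost");
    conf.set(groupsKey, "hivegroup");
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);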
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/beeline/TestBeeLineWithArgs.java Mon Oct 6 04:00:39 2014
@@ -477,4 +477,31 @@ public class TestBeeLineWithArgs {
final String EXPECTED_PATTERN = "embedded_table";
testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, argList);
}
+
+ /**
+ * Test that Beeline shows query progress for a time-consuming query.
+ * @throws Throwable
+ */
+ @Test
+ public void testQueryProgress() throws Throwable {
+ final String TEST_NAME = "testQueryProgress";
+ final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
+ "select count(*) from " + tableName + ";\n";
+ final String EXPECTED_PATTERN = "Parsing command";
+ testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(JDBC_URL));
+ }
+
+ /**
+ * Test that Beeline hides query progress when the silent option is set.
+ * @throws Throwable
+ */
+ @Test
+ public void testQueryProgressHidden() throws Throwable {
+ final String TEST_NAME = "testQueryProgress";
+ final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
+ "!set silent true\n" +
+ "select count(*) from " + tableName + ";\n";
+ final String EXPECTED_PATTERN = "Parsing command";
+ testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, false, getBaseArgs(JDBC_URL));
+ }
}
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Mon Oct 6 04:00:39 2014
@@ -1318,6 +1318,42 @@ public class TestJdbcDriver2 {
}
@Test
+ public void testResultSetColumnNameCaseInsensitive() throws SQLException {
+ Statement stmt = con.createStatement();
+ ResultSet res;
+
+ res = stmt.executeQuery("select c1 from " + dataTypeTableName + " limit 1");
+ try {
+ int count = 0;
+ while (res.next()) {
+ res.findColumn("c1");
+ res.findColumn("C1");
+ count++;
+ }
+ assertEquals(count, 1);
+ } catch (Exception e) {
+ String msg = "Unexpected exception: " + e;
+ LOG.info(msg, e);
+ fail(msg);
+ }
+
+ res = stmt.executeQuery("select c1 C1 from " + dataTypeTableName + " limit 1");
+ try {
+ int count = 0;
+ while (res.next()) {
+ res.findColumn("c1");
+ res.findColumn("C1");
+ count++;
+ }
+ assertEquals(count, 1);
+ } catch (Exception e) {
+ String msg = "Unexpected exception: " + e;
+ LOG.info(msg, e);
+ fail(msg);
+ }
+ }
+
+ @Test
public void testResultSetMetaData() throws SQLException {
Statement stmt = con.createStatement();
@@ -2130,4 +2166,82 @@ public class TestJdbcDriver2 {
}
stmt.close();
}
+
+ /**
+ * Test the query log retrieval methods of the JDBC HiveStatement.
+ * @throws Exception
+ */
+ @Test
+ public void testGetQueryLog() throws Exception {
+ // Prepare
+ String[] expectedLogs = {
+ "Parsing command",
+ "Parse Completed",
+ "Starting Semantic Analysis",
+ "Semantic Analysis Completed",
+ "Starting command"
+ };
+ String sql = "select count(*) from " + tableName;
+
+ // Verify the fetched log (from the beginning of log file)
+ HiveStatement stmt = (HiveStatement)con.createStatement();
+ assertNotNull("Statement is null", stmt);
+ stmt.executeQuery(sql);
+ List<String> logs = stmt.getQueryLog(false, 10000);
+ stmt.close();
+ verifyFetchedLog(logs, expectedLogs);
+
+ // Verify the fetched log (incrementally)
+ final HiveStatement statement = (HiveStatement)con.createStatement();
+ assertNotNull("Statement is null", statement);
+ statement.setFetchSize(10000);
+ final List<String> incrementalLogs = new ArrayList<String>();
+
+ Runnable logThread = new Runnable() {
+ @Override
+ public void run() {
+ while (statement.hasMoreLogs()) {
+ try {
+ incrementalLogs.addAll(statement.getQueryLog());
+ Thread.sleep(500);
+ } catch (SQLException e) {
+ LOG.error("Failed getQueryLog. Error message: " + e.getMessage());
+ fail("error in getting log thread");
+ } catch (InterruptedException e) {
+ LOG.error("Getting log thread is interrupted. Error message: " + e.getMessage());
+ fail("error in getting log thread");
+ }
+ }
+ }
+ };
+
+ Thread thread = new Thread(logThread);
+ thread.setDaemon(true);
+ thread.start();
+ statement.executeQuery(sql);
+ thread.interrupt();
+ thread.join(10000);
+ // fetch remaining logs
+ List<String> remainingLogs;
+ do {
+ remainingLogs = statement.getQueryLog();
+ incrementalLogs.addAll(remainingLogs);
+ } while (remainingLogs.size() > 0);
+ statement.close();
+
+ verifyFetchedLog(incrementalLogs, expectedLogs);
+ }
+
+ private void verifyFetchedLog(List<String> logs, String[] expectedLogs) {
+ StringBuilder stringBuilder = new StringBuilder();
+
+ for (String log : logs) {
+ stringBuilder.append(log);
+ }
+
+ String accumulatedLogs = stringBuilder.toString();
+ for (String expectedLog : expectedLogs) {
+ assertTrue(accumulatedLogs.contains(expectedLog));
+ }
+ }
}
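The test above exercises both fetch modes of the new HiveStatement log API: a one-shot fetch from the beginning of the log (getQueryLog(false, fetchSize)) and incremental polling guarded by hasMoreLogs(). A minimal incremental consumer, assuming stmt is a HiveStatement whose query runs on another thread:

    // Poll while the query runs; the no-argument getQueryLog() returns only
    // the lines produced since the previous call.
    while (stmt.hasMoreLogs()) {
      for (String line : stmt.getQueryLog()) {
        System.out.println(line);
      }
      Thread.sleep(500);   // polling interval
    }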
Modified: hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java (original)
+++ hive/branches/spark/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java Mon Oct 6 04:00:39 2014
@@ -120,9 +120,10 @@ public class TestHS2AuthzContext {
HiveAuthzContext context = contextCapturer.getValue();
assertEquals("Command ", ctxCmd, context.getCommandString());
- assertTrue("ip address pattern check", context.getIpAddress().contains("."));
+ assertTrue("ip address pattern check", context.getIpAddress().matches("[.:a-fA-F0-9]+"));
// ip address size check - check for something better than non zero
assertTrue("ip address size check", context.getIpAddress().length() > 7);
+
}
private Connection getConnection(String userName) throws SQLException {
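The old assertion required a '.' in the address and so failed when the connection came in over IPv6 (for example ::1); the widened character class accepts both families:

    // Both address families now satisfy the pattern check.
    "127.0.0.1".matches("[.:a-fA-F0-9]+");        // true (IPv4)
    "0:0:0:0:0:0:0:1".matches("[.:a-fA-F0-9]+");  // true (IPv6)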
Modified: hive/branches/spark/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/src/test/resources/testconfiguration.properties?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/spark/itests/src/test/resources/testconfiguration.properties Mon Oct 6 04:00:39 2014
@@ -55,6 +55,7 @@ minitez.query.files.shared=alter_merge_2
bucket2.q,\
bucket3.q,\
bucket4.q,\
+ cbo_correctness.q,\
correlationoptimizer1.q,\
count.q,\
create_merge_compressed.q,\
@@ -74,6 +75,7 @@ minitez.query.files.shared=alter_merge_2
disable_merge_for_bucketing.q,\
dynpart_sort_opt_vectorization.q,\
dynpart_sort_optimization.q,\
+ dynpart_sort_optimization2.q,\
enforce_order.q,\
filter_join_breaktask.q,\
filter_join_breaktask2.q,\
@@ -116,6 +118,7 @@ minitez.query.files.shared=alter_merge_2
orc_merge7.q,\
orc_merge_incompat1.q,\
orc_merge_incompat2.q,\
+ orc_vectorization_ppd.q,\
parallel.q,\
ptf.q,\
sample1.q,\
@@ -152,24 +155,73 @@ minitez.query.files.shared=alter_merge_2
update_where_non_partitioned.q,\
update_where_partitioned.q,\
update_two_cols.q,\
+ vector_between_in.q,\
vector_cast_constant.q,\
+ vector_char_4.q,\
+ vector_char_simple.q,\
+ vector_count_distinct.q,\
vector_data_types.q,\
vector_decimal_aggregate.q,\
+ vector_distinct_2.q,\
+ vector_elt.q,\
+ vector_groupby_3.q,\
vector_left_outer_join.q,\
vector_mapjoin_reduce.q,\
+ vector_non_string_partition.q,\
+ vector_orderby_5.q,\
vector_string_concat.q,\
+ vector_varchar_4.q,\
+ vector_varchar_simple.q,\
+ vectorization_0.q,\
+ vectorization_1.q,\
+ vectorization_10.q,\
+ vectorization_11.q,\
vectorization_12.q,\
vectorization_13.q,\
vectorization_14.q,\
vectorization_15.q,\
+ vectorization_16.q,\
+ vectorization_2.q,\
+ vectorization_3.q,\
+ vectorization_4.q,\
+ vectorization_5.q,\
+ vectorization_6.q,\
vectorization_9.q,\
+ vectorization_decimal_date.q,\
+ vectorization_div0.q,\
+ vectorization_nested_udf.q,\
+ vectorization_not.q,\
+ vectorization_part.q,\
vectorization_part_project.q,\
+ vectorization_pushdown.q,\
vectorization_short_regress.q,\
+ vectorized_bucketmapjoin1.q,\
+ vectorized_case.q,\
+ vectorized_context.q,\
vectorized_mapjoin.q,\
+ vectorized_math_funcs.q,\
vectorized_nested_mapjoin.q,\
+ vectorized_parquet.q,\
vectorized_ptf.q,\
+ vectorized_rcfile_columnar.q,\
vectorized_shufflejoin.q,\
- vectorized_timestamp_funcs.q
+ vectorized_string_funcs.q,\
+ vectorized_timestamp_funcs.q,\
+ auto_sortmerge_join_1.q,\
+ auto_sortmerge_join_10.q,\
+ auto_sortmerge_join_11.q,\
+ auto_sortmerge_join_12.q,\
+ auto_sortmerge_join_13.q,\
+ auto_sortmerge_join_14.q,\
+ auto_sortmerge_join_15.q,\
+ auto_sortmerge_join_16.q,\
+ auto_sortmerge_join_2.q,\
+ auto_sortmerge_join_3.q,\
+ auto_sortmerge_join_4.q,\
+ auto_sortmerge_join_5.q,\
+ auto_sortmerge_join_7.q,\
+ auto_sortmerge_join_8.q,\
+ auto_sortmerge_join_9.q
minitez.query.files=bucket_map_join_tez1.q,\
bucket_map_join_tez2.q,\
@@ -186,7 +238,11 @@ minitez.query.files=bucket_map_join_tez1
tez_joins_explain.q,\
tez_schema_evolution.q,\
tez_union.q,\
- tez_union_decimal.q
+ tez_union_decimal.q,\
+ tez_union_group_by.q,\
+ tez_smb_main.q,\
+ tez_smb_1.q,\
+ vectorized_dynamic_partition_pruning.q
beeline.positive.exclude=add_part_exist.q,\
alter1.q,\
@@ -342,6 +398,7 @@ beeline.positive.exclude=add_part_exist.
minimr.query.negative.files=cluster_tasklog_retrieval.q,\
file_with_header_footer_negative.q,\
+ local_mapred_error_cache.q,\
mapreduce_stack_trace.q,\
mapreduce_stack_trace_hadoop20.q,\
mapreduce_stack_trace_turnoff.q,\
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Mon Oct 6 04:00:39 2014
@@ -690,7 +690,10 @@ public class QTestUtil {
// conf.logVars(System.out);
// System.out.flush();
+ String execEngine = conf.get("hive.execution.engine");
+ conf.set("hive.execution.engine", "mr");
SessionState.start(conf);
+ conf.set("hive.execution.engine", execEngine);
db = Hive.get(conf);
fs = FileSystem.get(conf);
drv = new Driver(conf);
@@ -771,6 +774,8 @@ public class QTestUtil {
HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
"org.apache.hadoop.hive.ql.security.DummyAuthenticator");
+ String execEngine = conf.get("hive.execution.engine");
+ conf.set("hive.execution.engine", "mr");
CliSessionState ss = new CliSessionState(conf);
assert ss != null;
ss.in = System.in;
@@ -788,6 +793,7 @@ public class QTestUtil {
isSessionStateStarted = true;
+ conf.set("hive.execution.engine", execEngine);
return ss;
}
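Both hunks pin the execution engine to plain MapReduce just long enough to start the session, then restore whatever the test configured (on this branch, typically spark), so that SessionState startup does not pull in the real engine. The idiom, written here with a defensive try/finally (the commit itself restores unconditionally):

    String execEngine = conf.get("hive.execution.engine");
    conf.set("hive.execution.engine", "mr");            // start the session under MR
    try {
      SessionState.start(conf);
    } finally {
      conf.set("hive.execution.engine", execEngine);    // e.g. back to "spark"
    }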
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java Mon Oct 6 04:00:39 2014
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.udf;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDAF;
import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -32,6 +33,8 @@ import org.apache.hadoop.io.Text;
* UDAFTestMax.
*
*/
+@Description(name = "test_max",
+value = "_FUNC_(col) - UDF to report Max Value")
public class UDAFTestMax extends UDAF {
/**
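This and the following files add @Description annotations to the test UDFs; the annotation text is what DESCRIBE FUNCTION prints, with _FUNC_ replaced by the registered name. Illustratively, driven through the test driver:

    // Once registered, the annotation surfaces in DESCRIBE FUNCTION
    // (function and class names as in this commit).
    driver.run("create temporary function test_max as "
        + "'org.apache.hadoop.hive.ql.udf.UDAFTestMax'");
    driver.run("describe function test_max");
    // -> test_max(col) - UDF to report Max Value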
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFFileLookup.java Mon Oct 6 04:00:39 2014
@@ -27,6 +27,7 @@ import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -34,6 +35,8 @@ import org.apache.hadoop.io.Text;
/**
* A UDF for testing, which does key/value lookup from a file
*/
+@Description(name = "lookup",
+value = "_FUNC_(col) - UDF for key/value lookup from a file")
public class UDFFileLookup extends UDF {
static Log LOG = LogFactory.getLog(UDFFileLookup.class);
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestErrorOnFalse.java Mon Oct 6 04:00:39 2014
@@ -18,11 +18,14 @@
package org.apache.hadoop.hive.ql.udf;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
/**
* A UDF for testing, which throws a RuntimeException if the expression evaluates to false.
*/
+@Description(name = "test_error",
+value = "_FUNC_(col) - UDF throws RuntimeException if expression evaluates to false")
public class UDFTestErrorOnFalse extends UDF {
public int evaluate(Boolean b) {
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength.java Mon Oct 6 04:00:39 2014
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.udf;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
@@ -25,6 +26,8 @@ import org.apache.hadoop.io.Text;
/**
* A UDF for testing, which evaluates the length of a string.
*/
+@Description(name = "testlength",
+value = "_FUNC_(col) - UDF evaluates the length of the string")
public class UDFTestLength extends UDF {
IntWritable result = new IntWritable();
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDFTestLength2.java Mon Oct 6 04:00:39 2014
@@ -18,12 +18,15 @@
package org.apache.hadoop.hive.ql.udf;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDF;
/**
* A UDF for testing, which evaluates the length of a string. This UDF uses Java
* Primitive classes for parameters.
*/
+@Description(name = "testlength2",
+value = "_FUNC_(col) - UDF evaluates the length of the string and returns value as Java Integer")
public class UDFTestLength2 extends UDF {
public Integer evaluate(String s) {
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java Mon Oct 6 04:00:39 2014
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.MapredContext;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -26,7 +27,8 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.Reporter;
-
+@Description(name = "counter",
+value = "_FUNC_(col) - UDF to report MR counter values")
public class DummyContextUDF extends GenericUDF {
private MapredContext context;
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaBoolean.java Mon Oct 6 04:00:39 2014
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -27,6 +28,8 @@ import org.apache.hadoop.hive.serde2.obj
/**
* A test GenericUDF to return native Java's boolean type
*/
+@Description(name = "test_udf_get_java_boolean",
+value = "_FUNC_(str) - GenericUDF to return native Java's boolean type")
public class GenericUDFTestGetJavaBoolean extends GenericUDF {
ObjectInspector[] argumentOIs;
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestGetJavaString.java Mon Oct 6 04:00:39 2014
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -27,6 +28,8 @@ import org.apache.hadoop.hive.serde2.obj
/**
* A test GenericUDF to return native Java's string type
*/
+@Description(name = "test_udf_get_java_string",
+value = "_FUNC_(str) - GenericUDF to return native Java's string type")
public class GenericUDFTestGetJavaString extends GenericUDF {
ObjectInspector[] argumentOIs;
Modified: hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTestTranslate.java Mon Oct 6 04:00:39 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.udf.ge
import java.util.HashSet;
import java.util.Set;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
@@ -34,6 +35,8 @@ import org.apache.hadoop.io.Text;
/**
* Mimics oracle's function translate(str1, str2, str3).
*/
+@Description(name = "test_translate",
+value = "_FUNC_(str1, str2, str3) - Mimics oracle's function translate(str1, str2, str3)")
public class GenericUDFTestTranslate extends GenericUDF {
private transient ObjectInspector[] argumentOIs;
Modified: hive/branches/spark/jdbc/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/pom.xml?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/jdbc/pom.xml (original)
+++ hive/branches/spark/jdbc/pom.xml Mon Oct 6 04:00:39 2014
@@ -41,8 +41,14 @@
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
- <artifactId>hive-metastore</artifactId>
+ <artifactId>hive-service</artifactId>
<version>${project.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hive</groupId>
+ <artifactId>hive-exec</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
@@ -51,12 +57,12 @@
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
- <artifactId>hive-service</artifactId>
+ <artifactId>hive-metastore</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
- <artifactId>hive-exec</artifactId>
+ <artifactId>hive-shims</artifactId>
<version>${project.version}</version>
</dependency>
<!-- inter-project -->
@@ -135,8 +141,39 @@
<minimizeJar>true</minimizeJar>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>${hive.jdbc.driver.classifier}</shadedClassifierName>
+ <filters>
+ <filter>
+ <artifact>org.apache.hive.shims:hive-shims-common</artifact>
+ <includes>
+ <include>org/apache/hadoop/hive/shims/*</include>
+ <include>org/apache/hadoop/hive/thrift/*</include>
+ </includes>
+ </filter>
+ <filter>
+ <artifact>org.apache.hive.shims:hive-shims-common-secure</artifact>
+ <includes>
+ <include>org/apache/hadoop/hive/thrift/*</include>
+ <include>org/apache/hadoop/hive/thrift/client/*</include>
+ </includes>
+ </filter>
+ <filter>
+ <artifact>org.apache.hive.shims:hive-shims-0.23</artifact>
+ <includes>
+ <include>org/apache/hadoop/hive/thrift/*</include>
+ </includes>
+ </filter>
+ <filter>
+ <artifact>*:*</artifact>
+ <excludes>
+ <exclude>META-INF/*.SF</exclude>
+ <exclude>META-INF/*.DSA</exclude>
+ <exclude>META-INF/*.RSA</exclude>
+ </excludes>
+ </filter>
+ </filters>
<artifactSet>
<excludes>
+ <exclude>org.apache.commons:commons-compress</exclude>
<exclude>org.apache.hadoop:*</exclude>
<exclude>org.apache.hive:hive-ant</exclude>
<exclude>org.apache.ant:*</exclude>
@@ -150,23 +187,16 @@
<exclude>org.tukaani:*</exclude>
<exclude>org.iq80.snappy:*</exclude>
<exclude>org.apache.velocity:*</exclude>
+ <exclude>net.sf.jpam:*</exclude>
+ <exclude>org.apache.avro:*</exclude>
+ <exclude>net.sf.opencsv:*</exclude>
+ <exclude>org.antlr:*</exclude>
</excludes>
- </artifactSet>
- <filters>
- <filter>
- <artifact>*:*</artifact>
- <excludes>
- <exclude>META-INF/*.SF</exclude>
- <exclude>META-INF/*.DSA</exclude>
- <exclude>META-INF/*.RSA</exclude>
- </excludes>
- </filter>
- </filters>
+ </artifactSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
-
</project>
Modified: hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveDriver.java Mon Oct 6 04:00:39 2014
@@ -102,8 +102,9 @@ public class HiveDriver implements Drive
return Pattern.matches(URL_PREFIX + ".*", url);
}
+ @Override
public Connection connect(String url, Properties info) throws SQLException {
- return new HiveConnection(url, info);
+ return acceptsURL(url) ? new HiveConnection(url, info) : null;
}
/**
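Returning null instead of unconditionally constructing a HiveConnection brings the legacy driver in line with the JDBC contract: Driver.connect must answer null for a URL it does not accept, because DriverManager offers each URL to every registered driver in turn, roughly:

    // Roughly DriverManager's probe loop, which relies on the null contract.
    for (Driver d : Collections.list(DriverManager.getDrivers())) {
      Connection c = d.connect(url, info);   // null means "not my URL"
      if (c != null) {
        return c;
      }
    }
    throw new SQLException("No suitable driver found for " + url);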
Modified: hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java Mon Oct 6 04:00:39 2014
@@ -58,6 +58,7 @@ public abstract class HiveBaseResultSet
protected boolean wasNull = false;
protected Object[] row;
protected List<String> columnNames;
+ protected List<String> normalizedColumnNames;
protected List<String> columnTypes;
protected List<JdbcColumnAttributes> columnAttributes;
@@ -84,7 +85,7 @@ public abstract class HiveBaseResultSet
}
public int findColumn(String columnName) throws SQLException {
- int columnIndex = columnNames.indexOf(columnName);
+ int columnIndex = normalizedColumnNames.indexOf(columnName.toLowerCase());
if (columnIndex==-1) {
throw new SQLException();
} else {
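findColumn now resolves names case-insensitively by consulting a lower-cased shadow list, which the subclasses below keep in step with columnNames. The lookup reduces to:

    // Case-insensitive lookup against a lower-cased shadow of columnNames.
    List<String> normalizedColumnNames = new ArrayList<String>();
    for (String name : columnNames) {
      normalizedColumnNames.add(name.toLowerCase());
    }
    int columnIndex = normalizedColumnNames.indexOf(columnName.toLowerCase());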
Modified: hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveMetaDataResultSet.java Mon Oct 6 04:00:39 2014
@@ -36,8 +36,13 @@ public abstract class HiveMetaDataResult
}
if (columnNames!=null) {
this.columnNames = new ArrayList<String>(columnNames);
+ this.normalizedColumnNames = new ArrayList<String>();
+ for (String colName : columnNames) {
+ this.normalizedColumnNames.add(colName.toLowerCase());
+ }
} else {
this.columnNames = new ArrayList<String>();
+ this.normalizedColumnNames = new ArrayList<String>();
}
if (columnTypes!=null) {
this.columnTypes = new ArrayList<String>(columnTypes);
Modified: hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveQueryResultSet.java Mon Oct 6 04:00:39 2014
@@ -28,6 +28,7 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -75,6 +76,7 @@ public class HiveQueryResultSet extends
private boolean fetchFirst = false;
private final TProtocolVersion protocol;
+ private ReentrantLock transportLock;
public static class Builder {
@@ -98,6 +100,7 @@ public class HiveQueryResultSet extends
private int fetchSize = 50;
private boolean emptyResultSet = false;
private boolean isScrollable = false;
+ private ReentrantLock transportLock = null;
public Builder(Statement statement) throws SQLException {
this.statement = statement;
@@ -166,6 +169,11 @@ public class HiveQueryResultSet extends
return this;
}
+ public Builder setTransportLock(ReentrantLock transportLock) {
+ this.transportLock = transportLock;
+ return this;
+ }
+
public HiveQueryResultSet build() throws SQLException {
return new HiveQueryResultSet(this);
}
@@ -181,7 +189,9 @@ public class HiveQueryResultSet extends
this.stmtHandle = builder.stmtHandle;
this.sessHandle = builder.sessHandle;
this.fetchSize = builder.fetchSize;
+ this.transportLock = builder.transportLock;
columnNames = new ArrayList<String>();
+ normalizedColumnNames = new ArrayList<String>();
columnTypes = new ArrayList<String>();
columnAttributes = new ArrayList<JdbcColumnAttributes>();
if (builder.retrieveSchema) {
@@ -239,7 +249,17 @@ public class HiveQueryResultSet extends
try {
TGetResultSetMetadataReq metadataReq = new TGetResultSetMetadataReq(stmtHandle);
// TODO need session handle
- TGetResultSetMetadataResp metadataResp = client.GetResultSetMetadata(metadataReq);
+ TGetResultSetMetadataResp metadataResp;
+ if (transportLock == null) {
+ metadataResp = client.GetResultSetMetadata(metadataReq);
+ } else {
+ transportLock.lock();
+ try {
+ metadataResp = client.GetResultSetMetadata(metadataReq);
+ } finally {
+ transportLock.unlock();
+ }
+ }
Utils.verifySuccess(metadataResp.getStatus());
StringBuilder namesSb = new StringBuilder();
@@ -260,6 +280,7 @@ public class HiveQueryResultSet extends
}
String columnName = columns.get(pos).getColumnName();
columnNames.add(columnName);
+ normalizedColumnNames.add(columnName.toLowerCase());
TPrimitiveTypeEntry primitiveTypeEntry =
columns.get(pos).getTypeDesc().getTypes().get(0).getPrimitiveEntry();
String columnTypeName = TYPE_NAMES.get(primitiveTypeEntry.getType());
@@ -284,6 +305,10 @@ public class HiveQueryResultSet extends
columnNames.addAll(colNames);
columnTypes.addAll(colTypes);
columnAttributes.addAll(colAttributes);
+
+ for (String colName : colNames) {
+ normalizedColumnNames.add(colName.toLowerCase());
+ }
}
@Override
@@ -326,7 +351,17 @@ public class HiveQueryResultSet extends
if (fetchedRows == null || !fetchedRowsItr.hasNext()) {
TFetchResultsReq fetchReq = new TFetchResultsReq(stmtHandle,
orientation, fetchSize);
- TFetchResultsResp fetchResp = client.FetchResults(fetchReq);
+ TFetchResultsResp fetchResp;
+ if (transportLock == null) {
+ fetchResp = client.FetchResults(fetchReq);
+ } else {
+ transportLock.lock();
+ try {
+ fetchResp = client.FetchResults(fetchReq);
+ } finally {
+ transportLock.unlock();
+ }
+ }
Utils.verifySuccessWithInfo(fetchResp.getStatus());
TRowSet results = fetchResp.getResults();
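
The hunks above guard each Thrift RPC with an optional ReentrantLock so that a
concurrent cancel() issued from another thread does not interleave with a metadata or
fetch call on the shared transport. A minimal sketch of that guarded-call pattern,
factored into a helper for illustration (the ThriftCall interface and callLocked method
are hypothetical names, not part of the Hive JDBC driver):

    import java.util.concurrent.locks.ReentrantLock;

    // Hypothetical wrapper for a single Thrift RPC.
    interface ThriftCall<T> {
      T call() throws Exception;
    }

    class GuardedClient {
      // May be null; locking is optional, mirroring the null check in the diff.
      private final ReentrantLock transportLock;

      GuardedClient(ReentrantLock transportLock) {
        this.transportLock = transportLock;
      }

      <T> T callLocked(ThriftCall<T> op) throws Exception {
        if (transportLock == null) {
          return op.call();       // no lock configured: call straight through
        }
        transportLock.lock();     // serialize access to the shared transport
        try {
          return op.call();
        } finally {
          transportLock.unlock(); // always release, even if the RPC throws
        }
      }
    }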
Modified: hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java (original)
+++ hive/branches/spark/jdbc/src/java/org/apache/hive/jdbc/HiveStatement.java Mon Oct 6 04:00:39 2014
@@ -23,10 +23,14 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLWarning;
+import java.util.ArrayList;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.thrift.TCLIService;
import org.apache.hive.service.cli.thrift.TCancelOperationReq;
import org.apache.hive.service.cli.thrift.TCancelOperationResp;
@@ -38,6 +42,9 @@ import org.apache.hive.service.cli.thrif
import org.apache.hive.service.cli.thrift.TGetOperationStatusResp;
import org.apache.hive.service.cli.thrift.TOperationHandle;
import org.apache.hive.service.cli.thrift.TSessionHandle;
+import org.apache.hive.service.cli.thrift.TFetchResultsReq;
+import org.apache.hive.service.cli.thrift.TFetchResultsResp;
+import org.apache.hive.service.cli.thrift.TFetchOrientation;
/**
* HiveStatement.
@@ -77,6 +84,27 @@ public class HiveStatement implements ja
*/
private boolean isClosed = false;
+ /**
+ * Keep state so we can fail certain calls made after cancel().
+ */
+ private boolean isCancelled = false;
+
+ /**
+ * Keep this state so we can know whether the query in this statement is closed.
+ */
+ private boolean isQueryClosed = false;
+
+ /**
+ * Keep this state so we can know whether the query logs are being generated in HS2.
+ */
+ private boolean isLogBeingGenerated = true;
+
+ /**
+ * Keep this state so we can know whether the statement has been submitted to HS2 and has
+ * started execution successfully.
+ */
+ private boolean isExecuteStatementFailed = false;
+
// A fair reentrant lock
private ReentrantLock transportLock = new ReentrantLock(true);
@@ -113,6 +141,9 @@ public class HiveStatement implements ja
@Override
public void cancel() throws SQLException {
checkConnection("cancel");
+ if (isCancelled) {
+ return;
+ }
transportLock.lock();
try {
@@ -128,6 +159,7 @@ public class HiveStatement implements ja
} finally {
transportLock.unlock();
}
+ isCancelled = true;
}
/*
@@ -167,6 +199,8 @@ public class HiveStatement implements ja
} finally {
transportLock.unlock();
}
+ isQueryClosed = true;
+ isExecuteStatementFailed = false;
stmtHandle = null;
}
@@ -202,6 +236,7 @@ public class HiveStatement implements ja
checkConnection("execute");
closeClientOperation();
+ initFlags();
TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, sql);
/**
@@ -218,9 +253,12 @@ public class HiveStatement implements ja
TExecuteStatementResp execResp = client.ExecuteStatement(execReq);
Utils.verifySuccessWithInfo(execResp.getStatus());
stmtHandle = execResp.getOperationHandle();
+ isExecuteStatementFailed = false;
} catch (SQLException eS) {
+ isExecuteStatementFailed = true;
throw eS;
} catch (Exception ex) {
+ isExecuteStatementFailed = true;
throw new SQLException(ex.toString(), "08S01", ex);
} finally {
transportLock.unlock();
@@ -266,11 +304,14 @@ public class HiveStatement implements ja
}
}
} catch (SQLException e) {
+ isLogBeingGenerated = false;
throw e;
} catch (Exception e) {
+ isLogBeingGenerated = false;
throw new SQLException(e.toString(), "08S01", e);
}
}
+ isLogBeingGenerated = false;
// The query should be completed by now
if (!stmtHandle.isHasResultSet()) {
@@ -278,7 +319,7 @@ public class HiveStatement implements ja
}
resultSet = new HiveQueryResultSet.Builder(this).setClient(client).setSessionHandle(sessHandle)
.setStmtHandle(stmtHandle).setMaxRows(maxRows).setFetchSize(fetchSize)
- .setScrollable(isScrollableResultset)
+ .setScrollable(isScrollableResultset).setTransportLock(transportLock)
.build();
return true;
}
@@ -289,6 +330,13 @@ public class HiveStatement implements ja
}
}
+ private void initFlags() {
+ isCancelled = false;
+ isQueryClosed = false;
+ isLogBeingGenerated = true;
+ isExecuteStatementFailed = false;
+ }
+
/*
* (non-Javadoc)
*
@@ -713,4 +761,93 @@ public class HiveStatement implements ja
throw new SQLException("Cannot unwrap to " + iface);
}
+ /**
+ * Check whether query execution might be producing more logs to be fetched.
+ * This method is a public API for use outside of Hive, although it is not part of the
+ * interface java.sql.Statement.
+ * @return true if query execution might be producing more logs. It does not indicate whether
+ * the last log lines have already been fetched by getQueryLog.
+ */
+ public boolean hasMoreLogs() {
+ return isLogBeingGenerated;
+ }
+
+ /**
+ * Get the execution logs of the given SQL statement.
+ * This method is a public API for use outside of Hive, although it is not part of the
+ * interface java.sql.Statement.
+ * This method gets the incremental logs during SQL execution, and uses the fetchSize held
+ * by the HiveStatement object.
+ * @return a list of logs. It can be empty if there are no new logs to be retrieved at that time.
+ * @throws SQLException
+ * @throws ClosedOrCancelledStatementException if statement has been cancelled or closed
+ */
+ public List<String> getQueryLog() throws SQLException, ClosedOrCancelledStatementException {
+ return getQueryLog(true, fetchSize);
+ }
+
+ /**
+ * Get the execution logs of the given SQL statement.
+ * This method is a public API for use outside of Hive, although it is not part of the
+ * interface java.sql.Statement.
+ * @param incremental if true, fetch the logs incrementally; if false, fetch them from the
+ * beginning of the execution.
+ * @param fetchSize the number of lines to fetch
+ * @return a list of logs. It can be empty if there are no new logs to be retrieved at that time.
+ * @throws SQLException
+ * @throws ClosedOrCancelledStatementException if statement has been cancelled or closed
+ */
+ public List<String> getQueryLog(boolean incremental, int fetchSize)
+ throws SQLException, ClosedOrCancelledStatementException {
+ checkConnection("getQueryLog");
+ if (isCancelled) {
+ throw new ClosedOrCancelledStatementException("Method getQueryLog() failed. The " +
+ "statement has been closed or cancelled.");
+ }
+
+ List<String> logs = new ArrayList<String>();
+ TFetchResultsResp tFetchResultsResp = null;
+ transportLock.lock();
+ try {
+ if (stmtHandle != null) {
+ TFetchResultsReq tFetchResultsReq = new TFetchResultsReq(stmtHandle,
+ getFetchOrientation(incremental), fetchSize);
+ tFetchResultsReq.setFetchType((short)1);
+ tFetchResultsResp = client.FetchResults(tFetchResultsReq);
+ Utils.verifySuccessWithInfo(tFetchResultsResp.getStatus());
+ } else {
+ if (isQueryClosed) {
+ throw new ClosedOrCancelledStatementException("Method getQueryLog() failed. The " +
+ "statement has been closed or cancelled.");
+ }
+ if (isExecuteStatementFailed) {
+ throw new SQLException("Method getQueryLog() failed. Because the stmtHandle in " +
+ "HiveStatement is null and the statement execution might fail.");
+ } else {
+ return logs;
+ }
+ }
+ } catch (SQLException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new SQLException("Error when getting query log: " + e, e);
+ } finally {
+ transportLock.unlock();
+ }
+
+ RowSet rowSet = RowSetFactory.create(tFetchResultsResp.getResults(),
+ connection.getProtocol());
+ for (Object[] row : rowSet) {
+ logs.add((String)row[0]);
+ }
+ return logs;
+ }
+
+ private TFetchOrientation getFetchOrientation(boolean incremental) {
+ if (incremental) {
+ return TFetchOrientation.FETCH_NEXT;
+ } else {
+ return TFetchOrientation.FETCH_FIRST;
+ }
+ }
}
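
The new getQueryLog()/hasMoreLogs() pair is meant to be polled from a thread other than
the one running execute(), since execute() blocks until the query completes while the
shared transportLock keeps the two threads' RPCs from interleaving. A minimal usage
sketch under those assumptions (the connection URL and query are placeholders; the cast
to HiveStatement is needed because these methods are not part of java.sql.Statement):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import org.apache.hive.jdbc.HiveStatement;

    public class QueryLogTail {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection conn =
            DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
        final HiveStatement stmt = (HiveStatement) conn.createStatement();

        Thread tail = new Thread(new Runnable() {
          public void run() {
            try {
              while (stmt.hasMoreLogs()) {
                for (String line : stmt.getQueryLog()) { // incremental fetch
                  System.err.println(line);
                }
                Thread.sleep(500);                       // polling interval
              }
            } catch (Exception e) {
              // statement was closed or cancelled; stop tailing
            }
          }
        });
        tail.start();

        stmt.execute("SELECT count(*) FROM src");        // blocks until done
        tail.join();
        stmt.close();
        conn.close();
      }
    }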
Modified: hive/branches/spark/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql (original)
+++ hive/branches/spark/metastore/scripts/upgrade/mssql/hive-schema-0.14.0.mssql.sql Mon Oct 6 04:00:39 2014
@@ -836,14 +836,14 @@ CREATE INDEX TABLE_PARAMS_N49 ON TABLE_P
-- These are not part of package jdo, so if you are going to regenerate this file you need to manually add the following section back to the file.
-- -----------------------------------------------------------------------------------------------------------------------------------------------
CREATE TABLE COMPACTION_QUEUE(
- CQ_ID int NOT NULL,
+ CQ_ID bigint NOT NULL,
CQ_DATABASE varchar(128) NOT NULL,
CQ_TABLE varchar(128) NOT NULL,
CQ_PARTITION varchar(767) NULL,
CQ_STATE char(1) NOT NULL,
CQ_TYPE char(1) NOT NULL,
CQ_WORKER_ID varchar(128) NULL,
- CQ_START int NULL,
+ CQ_START bigint NULL,
CQ_RUN_AS varchar(128) NULL,
PRIMARY KEY CLUSTERED
(
@@ -852,23 +852,23 @@ PRIMARY KEY CLUSTERED
);
CREATE TABLE COMPLETED_TXN_COMPONENTS(
- CTC_TXNID int NULL,
+ CTC_TXNID bigint NULL,
CTC_DATABASE varchar(128) NOT NULL,
CTC_TABLE varchar(128) NULL,
CTC_PARTITION varchar(767) NULL
);
CREATE TABLE HIVE_LOCKS(
- HL_LOCK_EXT_ID int NOT NULL,
- HL_LOCK_INT_ID int NOT NULL,
- HL_TXNID int NULL,
+ HL_LOCK_EXT_ID bigint NOT NULL,
+ HL_LOCK_INT_ID bigint NOT NULL,
+ HL_TXNID bigint NULL,
HL_DB varchar(128) NOT NULL,
HL_TABLE varchar(128) NULL,
HL_PARTITION varchar(767) NULL,
HL_LOCK_STATE char(1) NOT NULL,
HL_LOCK_TYPE char(1) NOT NULL,
- HL_LAST_HEARTBEAT int NOT NULL,
- HL_ACQUIRED_AT int NULL,
+ HL_LAST_HEARTBEAT bigint NOT NULL,
+ HL_ACQUIRED_AT bigint NULL,
HL_USER varchar(128) NOT NULL,
HL_HOST varchar(128) NOT NULL,
PRIMARY KEY CLUSTERED
@@ -879,28 +879,28 @@ PRIMARY KEY CLUSTERED
);
CREATE TABLE NEXT_COMPACTION_QUEUE_ID(
- NCQ_NEXT int NOT NULL
+ NCQ_NEXT bigint NOT NULL
);
INSERT INTO NEXT_COMPACTION_QUEUE_ID VALUES(1);
CREATE TABLE NEXT_LOCK_ID(
- NL_NEXT int NOT NULL
+ NL_NEXT bigint NOT NULL
);
INSERT INTO NEXT_LOCK_ID VALUES(1);
CREATE TABLE NEXT_TXN_ID(
- NTXN_NEXT int NOT NULL
+ NTXN_NEXT bigint NOT NULL
);
INSERT INTO NEXT_TXN_ID VALUES(1);
CREATE TABLE TXNS(
- TXN_ID int NOT NULL,
+ TXN_ID bigint NOT NULL,
TXN_STATE char(1) NOT NULL,
- TXN_STARTED int NOT NULL,
- TXN_LAST_HEARTBEAT int NOT NULL,
+ TXN_STARTED bigint NOT NULL,
+ TXN_LAST_HEARTBEAT bigint NOT NULL,
TXN_USER varchar(128) NOT NULL,
TXN_HOST varchar(128) NOT NULL,
PRIMARY KEY CLUSTERED
@@ -910,7 +910,7 @@ PRIMARY KEY CLUSTERED
);
CREATE TABLE TXN_COMPONENTS(
- TC_TXNID int NULL,
+ TC_TXNID bigint NULL,
TC_DATABASE varchar(128) NOT NULL,
TC_TABLE varchar(128) NULL,
TC_PARTITION varchar(767) NULL
Modified: hive/branches/spark/metastore/scripts/upgrade/mssql/upgrade-0.13.0-to-0.14.0.mssql.sql
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/scripts/upgrade/mssql/upgrade-0.13.0-to-0.14.0.mssql.sql?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/metastore/scripts/upgrade/mssql/upgrade-0.13.0-to-0.14.0.mssql.sql (original)
+++ hive/branches/spark/metastore/scripts/upgrade/mssql/upgrade-0.13.0-to-0.14.0.mssql.sql Mon Oct 6 04:00:39 2014
@@ -1,6 +1,7 @@
SELECT 'Upgrading MetaStore schema from 0.13.0 to 0.14.0' AS MESSAGE;
:r 002-HIVE-7784.mssql.sql;
+:r 003-HIVE-8239.mssql.sql;
UPDATE VERSION SET SCHEMA_VERSION='0.14.0', VERSION_COMMENT='Hive release version 0.14.0' where VER_ID=1;
SELECT 'Finished upgrading MetaStore schema from 0.13.0 to 0.14.0' AS MESSAGE;
Modified: hive/branches/spark/metastore/scripts/upgrade/oracle/upgrade-0.13.0-to-0.14.0.oracle.sql
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/scripts/upgrade/oracle/upgrade-0.13.0-to-0.14.0.oracle.sql?rev=1629563&r1=1629562&r2=1629563&view=diff
==============================================================================
--- hive/branches/spark/metastore/scripts/upgrade/oracle/upgrade-0.13.0-to-0.14.0.oracle.sql (original)
+++ hive/branches/spark/metastore/scripts/upgrade/oracle/upgrade-0.13.0-to-0.14.0.oracle.sql Mon Oct 6 04:00:39 2014
@@ -1,5 +1,6 @@
SELECT 'Upgrading MetaStore schema from 0.13.0 to 0.14.0' AS Status from dual;
+@019-HIVE-7118.oracle.sql;
@020-HIVE-7784.oracle.sql;
UPDATE VERSION SET SCHEMA_VERSION='0.14.0', VERSION_COMMENT='Hive release version 0.14.0' where VER_ID=1;