You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by th...@apache.org on 2014/08/16 04:49:44 UTC
svn commit: r1618306 - in /hive/trunk:
itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/
ql/src/java/org/apache/hadoop/hive/ql/
ql/src/java/org/apache/hadoop/hive/ql/hooks/
ql/src/java/org/apache/hadoop/hive/ql/parse/
Author: thejas
Date: Sat Aug 16 02:49:44 2014
New Revision: 1618306
URL: http://svn.apache.org/r1618306
Log:
HIVE-7700 : authorization api - HivePrivilegeObject for permanent function should have database name set (Thejas Nair, reviewed by Jason Dere)
Modified:
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out
hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out
hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
hive/trunk/ql/src/test/results/clientpositive/authorization_create_func1.q.out
hive/trunk/ql/src/test/results/clientpositive/udf_using.q.out
Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java Sat Aug 16 02:49:44 2014
@@ -30,6 +30,8 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
@@ -43,7 +45,6 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
-import org.mockito.Matchers;
import org.mockito.Mockito;
/**
@@ -52,7 +53,10 @@ import org.mockito.Mockito;
public class TestHiveAuthorizerCheckInvocation {
protected static HiveConf conf;
protected static Driver driver;
- private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName();
+ private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
+ + "Table";
+ private static final String dbName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
+ + "Db";
static HiveAuthorizer mockedAuthorizer;
/**
@@ -82,8 +86,13 @@ public class TestHiveAuthorizerCheckInvo
SessionState.start(conf);
driver = new Driver(conf);
- CommandProcessorResponse resp = driver.run("create table " + tableName
+ runCmd("create table " + tableName
+ " (i int, j int, k string) partitioned by (city string, date string) ");
+ runCmd("create database " + dbName);
+ }
+
+ private static void runCmd(String cmd) throws CommandNeedRetryException {
+ CommandProcessorResponse resp = driver.run(cmd);
assertEquals(0, resp.getResponseCode());
}
@@ -101,7 +110,7 @@ public class TestHiveAuthorizerCheckInvo
+ " where k = 'X' and city = 'Scottsdale-AZ' ");
assertEquals(0, status);
- List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs();
+ List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
checkSingleTableInput(inputs);
HivePrivilegeObject tableObj = inputs.get(0);
assertEquals("no of columns used", 3, tableObj.getColumns().size());
@@ -123,7 +132,7 @@ public class TestHiveAuthorizerCheckInvo
int status = driver.compile("select * from " + tableName + " order by i");
assertEquals(0, status);
- List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs();
+ List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
checkSingleTableInput(inputs);
HivePrivilegeObject tableObj = inputs.get(0);
assertEquals("no of columns used", 5, tableObj.getColumns().size());
@@ -139,12 +148,60 @@ public class TestHiveAuthorizerCheckInvo
int status = driver.compile("describe " + tableName);
assertEquals(0, status);
- List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs();
+ List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
checkSingleTableInput(inputs);
HivePrivilegeObject tableObj = inputs.get(0);
assertNull("columns used", tableObj.getColumns());
}
+ @Test
+ public void testPermFunction() throws HiveAuthzPluginException, HiveAccessControlException,
+ CommandNeedRetryException {
+
+ reset(mockedAuthorizer);
+ final String funcName = "testauthfunc1";
+ int status = driver.compile("create function " + dbName + "." + funcName
+ + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
+ assertEquals(0, status);
+
+ List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
+
+ HivePrivilegeObject funcObj;
+ HivePrivilegeObject dbObj;
+ assertEquals("number of output object", 2, outputs.size());
+ if(outputs.get(0).getType() == HivePrivilegeObjectType.FUNCTION) {
+ funcObj = outputs.get(0);
+ dbObj = outputs.get(1);
+ } else {
+ funcObj = outputs.get(1);
+ dbObj = outputs.get(0);
+ }
+
+ assertEquals("input type", HivePrivilegeObjectType.FUNCTION, funcObj.getType());
+ assertTrue("function name", funcName.equalsIgnoreCase(funcObj.getObjectName()));
+ assertTrue("db name", dbName.equalsIgnoreCase(funcObj.getDbname()));
+
+ assertEquals("input type", HivePrivilegeObjectType.DATABASE, dbObj.getType());
+ assertTrue("db name", dbName.equalsIgnoreCase(dbObj.getDbname()));
+ }
+
+ @Test
+ public void testTempFunction() throws HiveAuthzPluginException, HiveAccessControlException,
+ CommandNeedRetryException {
+
+ reset(mockedAuthorizer);
+ final String funcName = "testAuthFunc2";
+ int status = driver.compile("create temporary function " + funcName
+ + " as 'org.apache.hadoop.hive.ql.udf.UDFPI'");
+ assertEquals(0, status);
+
+ List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
+ HivePrivilegeObject funcObj = outputs.get(0);
+ assertEquals("input type", HivePrivilegeObjectType.FUNCTION, funcObj.getType());
+ assertTrue("function name", funcName.equalsIgnoreCase(funcObj.getObjectName()));
+ assertEquals("db name", null, funcObj.getDbname());
+ }
+
private void checkSingleTableInput(List<HivePrivilegeObject> inputs) {
assertEquals("number of inputs", 1, inputs.size());
@@ -154,23 +211,26 @@ public class TestHiveAuthorizerCheckInvo
}
/**
- * @return the inputs passed in current call to authorizer.checkPrivileges
+ * @return pair with left value as inputs and right value as outputs,
+ * passed in current call to authorizer.checkPrivileges
* @throws HiveAuthzPluginException
* @throws HiveAccessControlException
*/
- private List<HivePrivilegeObject> getHivePrivilegeObjectInputs() throws HiveAuthzPluginException,
+ private Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> getHivePrivilegeObjectInputs() throws HiveAuthzPluginException,
HiveAccessControlException {
// Create argument capturer
// a class variable cast to this generic of generic class
Class<List<HivePrivilegeObject>> class_listPrivObjects = (Class) List.class;
ArgumentCaptor<List<HivePrivilegeObject>> inputsCapturer = ArgumentCaptor
.forClass(class_listPrivObjects);
+ ArgumentCaptor<List<HivePrivilegeObject>> outputsCapturer = ArgumentCaptor
+ .forClass(class_listPrivObjects);
verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class),
- inputsCapturer.capture(), Matchers.anyListOf(HivePrivilegeObject.class),
+ inputsCapturer.capture(), outputsCapturer.capture(),
any(HiveAuthzContext.class));
- return inputsCapturer.getValue();
+ return new ImmutablePair(inputsCapturer.getValue(), outputsCapturer.getValue());
}
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Sat Aug 16 02:49:44 2014
@@ -754,6 +754,9 @@ public class Driver implements CommandPr
objName = privObject.getD();
break;
case FUNCTION:
+ if(privObject.getDatabase() != null) {
+ dbname = privObject.getDatabase().getName();
+ }
objName = privObject.getFunctionName();
break;
case DUMMYPARTITION:
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java Sat Aug 16 02:49:44 2014
@@ -333,6 +333,9 @@ public class Entity implements Serializa
case DUMMYPARTITION:
return p.getName();
case FUNCTION:
+ if (database != null) {
+ return database.getName() + "." + stringObject;
+ }
return stringObject;
default:
return d;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java Sat Aug 16 02:49:44 2014
@@ -169,6 +169,7 @@ public class FunctionSemanticAnalyzer ex
try {
String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
String dbName = qualifiedNameParts[0];
+ functionName = qualifiedNameParts[1];
database = getDatabase(dbName);
} catch (HiveException e) {
LOG.error(e);
Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out Sat Aug 16 02:49:44 2014
@@ -1 +1 @@
-FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEFUNCTION [[ADMIN PRIVILEGE] on Object [type=DATABASE, name=default], [ADMIN PRIVILEGE] on Object [type=FUNCTION, name=perm_fn]]
+FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEFUNCTION [[ADMIN PRIVILEGE] on Object [type=DATABASE, name=default], [ADMIN PRIVILEGE] on Object [type=FUNCTION, name=default.perm_fn]]
Modified: hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out Sat Aug 16 02:49:44 2014
@@ -1,5 +1,5 @@
PREHOOK: query: create function lookup as 'org.apache.hadoop.hive.ql.udf.UDFFileLookup' using file '../../data/files/sales.txt'
PREHOOK: type: CREATEFUNCTION
PREHOOK: Output: database:default
-PREHOOK: Output: lookup
+PREHOOK: Output: default.lookup
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Hive warehouse is non-local, but ../../data/files/sales.txt specifies file on local filesystem. Resources on non-local warehouse should specify a non-local scheme/path
Modified: hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out Sat Aug 16 02:49:44 2014
@@ -1,6 +1,6 @@
PREHOOK: query: create function lookup as 'org.apache.hadoop.hive.ql.udf.UDFFileLookup' using file 'nonexistent_file.txt'
PREHOOK: type: CREATEFUNCTION
PREHOOK: Output: database:default
-PREHOOK: Output: lookup
+PREHOOK: Output: default.lookup
nonexistent_file.txt does not exist
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. nonexistent_file.txt does not exist
Modified: hive/trunk/ql/src/test/results/clientpositive/authorization_create_func1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/authorization_create_func1.q.out?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/authorization_create_func1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/authorization_create_func1.q.out Sat Aug 16 02:49:44 2014
@@ -13,11 +13,11 @@ POSTHOOK: Output: temp_fn
PREHOOK: query: create function perm_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii'
PREHOOK: type: CREATEFUNCTION
PREHOOK: Output: database:default
-PREHOOK: Output: perm_fn
+PREHOOK: Output: default.perm_fn
POSTHOOK: query: create function perm_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii'
POSTHOOK: type: CREATEFUNCTION
POSTHOOK: Output: database:default
-POSTHOOK: Output: perm_fn
+POSTHOOK: Output: default.perm_fn
PREHOOK: query: drop temporary function temp_fn
PREHOOK: type: DROPFUNCTION
PREHOOK: Output: temp_fn
@@ -27,8 +27,8 @@ POSTHOOK: Output: temp_fn
PREHOOK: query: drop function perm_fn
PREHOOK: type: DROPFUNCTION
PREHOOK: Output: database:default
-PREHOOK: Output: perm_fn
+PREHOOK: Output: default.perm_fn
POSTHOOK: query: drop function perm_fn
POSTHOOK: type: DROPFUNCTION
POSTHOOK: Output: database:default
-POSTHOOK: Output: perm_fn
+POSTHOOK: Output: default.perm_fn
Modified: hive/trunk/ql/src/test/results/clientpositive/udf_using.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_using.q.out?rev=1618306&r1=1618305&r2=1618306&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_using.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_using.q.out Sat Aug 16 02:49:44 2014
@@ -1,11 +1,11 @@
#### A masked pattern was here ####
PREHOOK: type: CREATEFUNCTION
PREHOOK: Output: database:default
-PREHOOK: Output: lookup
+PREHOOK: Output: default.lookup
#### A masked pattern was here ####
POSTHOOK: type: CREATEFUNCTION
POSTHOOK: Output: database:default
-POSTHOOK: Output: lookup
+POSTHOOK: Output: default.lookup
PREHOOK: query: create table udf_using (c1 string)
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
@@ -44,9 +44,9 @@ POSTHOOK: Output: default@udf_using
PREHOOK: query: drop function lookup
PREHOOK: type: DROPFUNCTION
PREHOOK: Output: database:default
-PREHOOK: Output: lookup
+PREHOOK: Output: default.lookup
POSTHOOK: query: drop function lookup
POSTHOOK: type: DROPFUNCTION
POSTHOOK: Output: database:default
-POSTHOOK: Output: lookup
+POSTHOOK: Output: default.lookup
#### A masked pattern was here ####