You are viewing a plain text version of this content; the canonical version is available at the original mailing-list archive posting.
Posted to commits@hive.apache.org by ha...@apache.org on 2014/01/30 21:02:01 UTC
svn commit: r1562939 - in /hive/trunk:
itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/
itests/hive-unit/src/test/java/org/apache/hive/jdbc/
metastore/src/java/org/apache/hadoop/hive/metastore/
metastore/src/model/org/apache/hadoop/hive/metastore/model/ ...
Author: hashutosh
Date: Thu Jan 30 20:02:00 2014
New Revision: 1562939
URL: http://svn.apache.org/r1562939
Log:
HIVE-6122 : Implement show grant on <resource> (Navis via Ashutosh Chauhan)
Added:
hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPrincipalDesc.java
hive/trunk/ql/src/test/queries/clientpositive/authorization_9.q
hive/trunk/ql/src/test/results/clientpositive/authorization_9.q.out
Modified:
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out
hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out
hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out
hive/trunk/ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out
hive/trunk/ql/src/test/results/clientpositive/authorization_2.q.out
hive/trunk/ql/src/test/results/clientpositive/authorization_6.q.out
Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java Thu Jan 30 20:02:00 2014
@@ -57,7 +57,7 @@ public class TestJdbcDriver extends Test
private static final String partitionedColumnName = "partcolabc";
private static final String partitionedColumnValue = "20090619";
private static final String partitionedTableComment = "Partitioned table";
- private static final String dataTypeTableName = "testDataTypeTable";
+ private static final String dataTypeTableName = "testdatatypetable";
private static final String dataTypeTableComment = "Table with many column data types";
private final HiveConf conf;
private final Path dataFilePath;
Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java Thu Jan 30 20:02:00 2014
@@ -78,7 +78,7 @@ public class TestJdbcDriver2 {
private static final String partitionedColumnName = "partcolabc";
private static final String partitionedColumnValue = "20090619";
private static final String partitionedTableComment = "Partitioned table";
- private static final String dataTypeTableName = "testDataTypeTable";
+ private static final String dataTypeTableName = "testdatatypetable";
private static final String dataTypeTableComment = "Table with many column data types";
private final HiveConf conf;
private final Path dataFilePath;
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Thu Jan 30 20:02:00 2014
@@ -3834,94 +3834,129 @@ public class HiveMetaStore extends Thrif
public List<HiveObjectPrivilege> list_privileges(String principalName,
PrincipalType principalType, HiveObjectRef hiveObject)
throws MetaException, TException {
+ if (hiveObject.getObjectType() == null) {
+ return getAllPrivileges(principalName, principalType);
+ }
if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) {
- return this.list_global_privileges(principalName, principalType);
- } else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) {
- return this.list_db_privileges(principalName, principalType, hiveObject
+ return list_global_privileges(principalName, principalType);
+ }
+ if (hiveObject.getObjectType() == HiveObjectType.DATABASE) {
+ return list_db_privileges(principalName, principalType, hiveObject
.getDbName());
- } else if (hiveObject.getObjectType() == HiveObjectType.TABLE) {
- return this.list_table_privileges(principalName, principalType,
+ }
+ if (hiveObject.getObjectType() == HiveObjectType.TABLE) {
+ return list_table_privileges(principalName, principalType,
hiveObject.getDbName(), hiveObject.getObjectName());
- } else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) {
- return this.list_partition_privileges(principalName, principalType,
+ }
+ if (hiveObject.getObjectType() == HiveObjectType.PARTITION) {
+ return list_partition_privileges(principalName, principalType,
hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject
- .getPartValues());
- } else if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
- return this.list_column_privileges(principalName, principalType,
+ .getPartValues());
+ }
+ if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
+ if (hiveObject.getPartValues() == null || hiveObject.getPartValues().isEmpty()) {
+ return list_table_column_privileges(principalName, principalType,
+ hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject.getColumnName());
+ }
+ return list_partition_column_privileges(principalName, principalType,
hiveObject.getDbName(), hiveObject.getObjectName(), hiveObject
- .getPartValues(), hiveObject.getColumnName());
+ .getPartValues(), hiveObject.getColumnName());
}
return null;
}
- public List<HiveObjectPrivilege> list_column_privileges(
+ private List<HiveObjectPrivilege> getAllPrivileges(String principalName,
+ PrincipalType principalType) throws TException {
+ List<HiveObjectPrivilege> privs = new ArrayList<HiveObjectPrivilege>();
+ privs.addAll(list_global_privileges(principalName, principalType));
+ privs.addAll(list_db_privileges(principalName, principalType, null));
+ privs.addAll(list_table_privileges(principalName, principalType, null, null));
+ privs.addAll(list_partition_privileges(principalName, principalType, null, null, null));
+ privs.addAll(list_table_column_privileges(principalName, principalType, null, null, null));
+ privs.addAll(list_partition_column_privileges(principalName, principalType,
+ null, null, null, null));
+ return privs;
+ }
+
+ public List<HiveObjectPrivilege> list_table_column_privileges(
+ final String principalName, final PrincipalType principalType,
+ final String dbName, final String tableName, final String columnName)
+ throws MetaException, TException {
+ incrementCounter("list_table_column_privileges");
+
+ try {
+ if (dbName == null) {
+ return getMS().listPrincipalTableColumnGrantsAll(principalName, principalType);
+ }
+ if (principalName == null) {
+ return getMS().listTableColumnGrantsAll(dbName, tableName, columnName);
+ }
+ List<MTableColumnPrivilege> mTableCols = getMS()
+ .listPrincipalTableColumnGrants(principalName, principalType,
+ dbName, tableName, columnName);
+ if (mTableCols.isEmpty()) {
+ return Collections.emptyList();
+ }
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (int i = 0; i < mTableCols.size(); i++) {
+ MTableColumnPrivilege sCol = mTableCols.get(i);
+ HiveObjectRef objectRef = new HiveObjectRef(
+ HiveObjectType.COLUMN, dbName, tableName, null, sCol.getColumnName());
+ HiveObjectPrivilege secObj = new HiveObjectPrivilege(
+ objectRef, sCol.getPrincipalName(), principalType,
+ new PrivilegeGrantInfo(sCol.getPrivilege(), sCol
+ .getCreateTime(), sCol.getGrantor(), PrincipalType
+ .valueOf(sCol.getGrantorType()), sCol
+ .getGrantOption()));
+ result.add(secObj);
+ }
+ return result;
+ } catch (MetaException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public List<HiveObjectPrivilege> list_partition_column_privileges(
final String principalName, final PrincipalType principalType,
final String dbName, final String tableName, final List<String> partValues,
final String columnName) throws MetaException, TException {
- incrementCounter("list_security_column_grant");
+ incrementCounter("list_partition_column_privileges");
- List<HiveObjectPrivilege> ret = null;
try {
- RawStore ms = getMS();
- String partName = null;
- if (partValues != null && partValues.size() > 0) {
- Table tbl = get_table(dbName, tableName);
- partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues);
- }
-
- List<HiveObjectPrivilege> result = Collections.<HiveObjectPrivilege> emptyList();
-
- if (partName != null) {
- Partition part = null;
- part = get_partition_by_name(dbName, tableName, partName);
- List<MPartitionColumnPrivilege> mPartitionCols = ms.listPrincipalPartitionColumnGrants(
- principalName,
- principalType, dbName, tableName, partName, columnName);
- if (mPartitionCols.size() > 0) {
- result = new ArrayList<HiveObjectPrivilege>();
- for (int i = 0; i < mPartitionCols.size(); i++) {
- MPartitionColumnPrivilege sCol = mPartitionCols.get(i);
- HiveObjectRef objectRef = new HiveObjectRef(
- HiveObjectType.COLUMN, dbName, tableName,
- part == null ? null : part.getValues(), sCol
- .getColumnName());
- HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef,
- sCol.getPrincipalName(), principalType,
- new PrivilegeGrantInfo(sCol.getPrivilege(), sCol
- .getCreateTime(), sCol.getGrantor(), PrincipalType
- .valueOf(sCol.getGrantorType()), sCol.getGrantOption()));
- result.add(secObj);
- }
- }
- } else {
- List<MTableColumnPrivilege> mTableCols = ms
- .listPrincipalTableColumnGrants(principalName, principalType,
- dbName, tableName, columnName);
- if (mTableCols.size() > 0) {
- result = new ArrayList<HiveObjectPrivilege>();
- for (int i = 0; i < mTableCols.size(); i++) {
- MTableColumnPrivilege sCol = mTableCols.get(i);
- HiveObjectRef objectRef = new HiveObjectRef(
- HiveObjectType.COLUMN, dbName, tableName, null, sCol
- .getColumnName());
- HiveObjectPrivilege secObj = new HiveObjectPrivilege(
- objectRef, sCol.getPrincipalName(), principalType,
- new PrivilegeGrantInfo(sCol.getPrivilege(), sCol
- .getCreateTime(), sCol.getGrantor(), PrincipalType
- .valueOf(sCol.getGrantorType()), sCol
- .getGrantOption()));
- result.add(secObj);
- }
- }
+ if (dbName == null) {
+ return getMS().listPrincipalPartitionColumnGrantsAll(principalName, principalType);
}
-
- ret = result;
+ Table tbl = get_table(dbName, tableName);
+ String partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues);
+ if (principalName == null) {
+ return getMS().listPartitionColumnGrantsAll(dbName, tableName, partName, columnName);
+ }
+ List<MPartitionColumnPrivilege> mPartitionCols = getMS().listPrincipalPartitionColumnGrants(
+ principalName,
+ principalType, dbName, tableName, partName, columnName);
+ if (mPartitionCols.isEmpty()) {
+ return Collections.emptyList();
+ }
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (int i = 0; i < mPartitionCols.size(); i++) {
+ MPartitionColumnPrivilege sCol = mPartitionCols.get(i);
+ HiveObjectRef objectRef = new HiveObjectRef(
+ HiveObjectType.COLUMN, dbName, tableName, partValues, sCol.getColumnName());
+ HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef,
+ sCol.getPrincipalName(), principalType,
+ new PrivilegeGrantInfo(sCol.getPrivilege(), sCol
+ .getCreateTime(), sCol.getGrantor(), PrincipalType
+ .valueOf(sCol.getGrantorType()), sCol.getGrantOption()));
+ result.add(secObj);
+ }
+ return result;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
throw new RuntimeException(e);
}
- return ret;
}
public List<HiveObjectPrivilege> list_db_privileges(final String principalName,
@@ -3930,25 +3965,30 @@ public class HiveMetaStore extends Thrif
incrementCounter("list_security_db_grant");
try {
- RawStore ms = getMS();
- List<MDBPrivilege> mDbs = ms.listPrincipalDBGrants(
+ if (dbName == null) {
+ return getMS().listPrincipalDBGrantsAll(principalName, principalType);
+ }
+ if (principalName == null) {
+ return getMS().listDBGrantsAll(dbName);
+ }
+ List<MDBPrivilege> mDbs = getMS().listPrincipalDBGrants(
principalName, principalType, dbName);
- if (mDbs.size() > 0) {
- List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
- for (int i = 0; i < mDbs.size(); i++) {
- MDBPrivilege sDB = mDbs.get(i);
- HiveObjectRef objectRef = new HiveObjectRef(
- HiveObjectType.DATABASE, dbName, null, null, null);
- HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef,
- sDB.getPrincipalName(), principalType,
- new PrivilegeGrantInfo(sDB.getPrivilege(), sDB
- .getCreateTime(), sDB.getGrantor(), PrincipalType
- .valueOf(sDB.getGrantorType()), sDB.getGrantOption()));
- result.add(secObj);
- }
- return result;
+ if (mDbs.isEmpty()) {
+ return Collections.<HiveObjectPrivilege>emptyList();
+ }
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (int i = 0; i < mDbs.size(); i++) {
+ MDBPrivilege sDB = mDbs.get(i);
+ HiveObjectRef objectRef = new HiveObjectRef(
+ HiveObjectType.DATABASE, dbName, null, null, null);
+ HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef,
+ sDB.getPrincipalName(), principalType,
+ new PrivilegeGrantInfo(sDB.getPrivilege(), sDB
+ .getCreateTime(), sDB.getGrantor(), PrincipalType
+ .valueOf(sDB.getGrantorType()), sDB.getGrantOption()));
+ result.add(secObj);
}
- return Collections.<HiveObjectPrivilege> emptyList();
+ return result;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -3963,30 +4003,34 @@ public class HiveMetaStore extends Thrif
incrementCounter("list_security_partition_grant");
try {
- RawStore ms = getMS();
+ if (dbName == null) {
+ return getMS().listPrincipalPartitionGrantsAll(principalName, principalType);
+ }
Table tbl = get_table(dbName, tableName);
String partName = Warehouse.makePartName(tbl.getPartitionKeys(), partValues);
- List<MPartitionPrivilege> mParts = ms.listPrincipalPartitionGrants(
+ if (principalName == null) {
+ return getMS().listPartitionGrantsAll(dbName, tableName, partName);
+ }
+ List<MPartitionPrivilege> mParts = getMS().listPrincipalPartitionGrants(
principalName, principalType, dbName, tableName, partName);
- if (mParts.size() > 0) {
- List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
- for (int i = 0; i < mParts.size(); i++) {
- MPartitionPrivilege sPart = mParts.get(i);
- HiveObjectRef objectRef = new HiveObjectRef(
- HiveObjectType.PARTITION, dbName, tableName, partValues,
- null);
- HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef,
- sPart.getPrincipalName(), principalType,
- new PrivilegeGrantInfo(sPart.getPrivilege(), sPart
- .getCreateTime(), sPart.getGrantor(), PrincipalType
- .valueOf(sPart.getGrantorType()), sPart
- .getGrantOption()));
+ if (mParts.isEmpty()) {
+ return Collections.<HiveObjectPrivilege> emptyList();
+ }
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (int i = 0; i < mParts.size(); i++) {
+ MPartitionPrivilege sPart = mParts.get(i);
+ HiveObjectRef objectRef = new HiveObjectRef(
+ HiveObjectType.PARTITION, dbName, tableName, partValues, null);
+ HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef,
+ sPart.getPrincipalName(), principalType,
+ new PrivilegeGrantInfo(sPart.getPrivilege(), sPart
+ .getCreateTime(), sPart.getGrantor(), PrincipalType
+ .valueOf(sPart.getGrantorType()), sPart
+ .getGrantOption()));
- result.add(secObj);
- }
- return result;
+ result.add(secObj);
}
- return Collections.<HiveObjectPrivilege> emptyList();
+ return result;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -4001,24 +4045,30 @@ public class HiveMetaStore extends Thrif
incrementCounter("list_security_table_grant");
try {
+ if (dbName == null) {
+ return getMS().listPrincipalTableGrantsAll(principalName, principalType);
+ }
+ if (principalName == null) {
+ return getMS().listTableGrantsAll(dbName, tableName);
+ }
List<MTablePrivilege> mTbls = getMS()
.listAllTableGrants(principalName, principalType, dbName, tableName);
- if (mTbls.size() > 0) {
- List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
- for (int i = 0; i < mTbls.size(); i++) {
- MTablePrivilege sTbl = mTbls.get(i);
- HiveObjectRef objectRef = new HiveObjectRef(
- HiveObjectType.TABLE, dbName, tableName, null, null);
- HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef,
- sTbl.getPrincipalName(), principalType,
- new PrivilegeGrantInfo(sTbl.getPrivilege(), sTbl.getCreateTime(), sTbl
- .getGrantor(), PrincipalType.valueOf(sTbl
- .getGrantorType()), sTbl.getGrantOption()));
- result.add(secObj);
- }
- return result;
+ if (mTbls.isEmpty()) {
+ return Collections.<HiveObjectPrivilege> emptyList();
+ }
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (int i = 0; i < mTbls.size(); i++) {
+ MTablePrivilege sTbl = mTbls.get(i);
+ HiveObjectRef objectRef = new HiveObjectRef(
+ HiveObjectType.TABLE, dbName, tableName, null, null);
+ HiveObjectPrivilege secObj = new HiveObjectPrivilege(objectRef,
+ sTbl.getPrincipalName(), principalType,
+ new PrivilegeGrantInfo(sTbl.getPrivilege(), sTbl.getCreateTime(), sTbl
+ .getGrantor(), PrincipalType.valueOf(sTbl
+ .getGrantorType()), sTbl.getGrantOption()));
+ result.add(secObj);
}
- return Collections.<HiveObjectPrivilege> emptyList();
+ return result;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
@@ -4032,24 +4082,27 @@ public class HiveMetaStore extends Thrif
incrementCounter("list_security_user_grant");
try {
+ if (principalName == null) {
+ return getMS().listGlobalGrantsAll();
+ }
List<MGlobalPrivilege> mUsers = getMS().listPrincipalGlobalGrants(
principalName, principalType);
- if (mUsers.size() > 0) {
- List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
- for (int i = 0; i < mUsers.size(); i++) {
- MGlobalPrivilege sUsr = mUsers.get(i);
- HiveObjectRef objectRef = new HiveObjectRef(
- HiveObjectType.GLOBAL, null, null, null, null);
- HiveObjectPrivilege secUser = new HiveObjectPrivilege(
- objectRef, sUsr.getPrincipalName(), principalType,
- new PrivilegeGrantInfo(sUsr.getPrivilege(), sUsr
- .getCreateTime(), sUsr.getGrantor(), PrincipalType
- .valueOf(sUsr.getGrantorType()), sUsr.getGrantOption()));
- result.add(secUser);
- }
- return result;
+ if (mUsers.isEmpty()) {
+ return Collections.<HiveObjectPrivilege> emptyList();
+ }
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (int i = 0; i < mUsers.size(); i++) {
+ MGlobalPrivilege sUsr = mUsers.get(i);
+ HiveObjectRef objectRef = new HiveObjectRef(
+ HiveObjectType.GLOBAL, null, null, null, null);
+ HiveObjectPrivilege secUser = new HiveObjectPrivilege(
+ objectRef, sUsr.getPrincipalName(), principalType,
+ new PrivilegeGrantInfo(sUsr.getPrivilege(), sUsr
+ .getCreateTime(), sUsr.getGrantor(), PrincipalType
+ .valueOf(sUsr.getGrantorType()), sUsr.getGrantOption()));
+ result.add(secUser);
}
- return Collections.<HiveObjectPrivilege> emptyList();
+ return result;
} catch (MetaException e) {
throw e;
} catch (Exception e) {
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Thu Jan 30 20:02:00 2014
@@ -3935,6 +3935,38 @@ public class ObjectStore implements RawS
return userNameDbPriv;
}
+ @Override
+ public List<HiveObjectPrivilege> listGlobalGrantsAll() {
+ boolean commited = false;
+ try {
+ openTransaction();
+ Query query = pm.newQuery(MGlobalPrivilege.class);
+ List<MGlobalPrivilege> userNameDbPriv = (List<MGlobalPrivilege>) query.execute();
+ pm.retrieveAll(userNameDbPriv);
+ commited = commitTransaction();
+ return convertGlobal(userNameDbPriv);
+ } finally {
+ if (!commited) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ private List<HiveObjectPrivilege> convertGlobal(List<MGlobalPrivilege> privs) {
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (MGlobalPrivilege priv : privs) {
+ String pname = priv.getPrincipalName();
+ PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
+
+ HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null);
+ PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
+ priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
+
+ result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
+ }
+ return result;
+ }
+
@SuppressWarnings("unchecked")
@Override
public List<MDBPrivilege> listPrincipalDBGrants(String principalName,
@@ -3963,6 +3995,34 @@ public class ObjectStore implements RawS
return mSecurityDBList;
}
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return convertDB(listPrincipalAllDBGrant(principalName, principalType));
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listDBGrantsAll(String dbName) {
+ return convertDB(listDatabaseGrants(dbName));
+ }
+
+ private List<HiveObjectPrivilege> convertDB(List<MDBPrivilege> privs) {
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (MDBPrivilege priv : privs) {
+ String pname = priv.getPrincipalName();
+ PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
+ String database = priv.getDatabase().getName();
+
+ HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.DATABASE, database,
+ null, null, null);
+ PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
+ priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
+
+ result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
+ }
+ return result;
+ }
+
@SuppressWarnings("unchecked")
private List<MDBPrivilege> listPrincipalAllDBGrant(
String principalName, PrincipalType principalType) {
@@ -4329,6 +4389,78 @@ public class ObjectStore implements RawS
return mSecurityColList;
}
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll(
+ String principalName, PrincipalType principalType) {
+ boolean success = false;
+ try {
+ openTransaction();
+ LOG.debug("Executing listPrincipalPartitionColumnGrantsAll");
+ Query query = pm.newQuery(MPartitionColumnPrivilege.class,
+ "principalName == t1 && principalType == t2");
+ query.declareParameters("java.lang.String t1, java.lang.String t2");
+ List<MPartitionColumnPrivilege> mSecurityTabPartList = (List<MPartitionColumnPrivilege>)
+ query.executeWithArray(principalName, principalType.toString());
+ LOG.debug("Done executing query for listPrincipalPartitionColumnGrantsAll");
+ pm.retrieveAll(mSecurityTabPartList);
+ List<HiveObjectPrivilege> result = convertPartCols(mSecurityTabPartList);
+ success = commitTransaction();
+ LOG.debug("Done retrieving all objects for listPrincipalPartitionColumnGrantsAll");
+ return result;
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPartitionColumnGrantsAll(
+ String dbName, String tableName, String partitionName, String columnName) {
+ boolean success = false;
+ try {
+ openTransaction();
+ LOG.debug("Executing listPartitionColumnGrantsAll");
+ Query query = pm.newQuery(MPartitionColumnPrivilege.class,
+ "partition.table.tableName == t3 && partition.table.database.name == t4 && " +
+ "partition.partitionName == t5 && columnName == t6");
+ query.declareParameters(
+ "java.lang.String t3, java.lang.String t4, java.lang.String t5, java.lang.String t6");
+ List<MPartitionColumnPrivilege> mSecurityTabPartList = (List<MPartitionColumnPrivilege>)
+ query.executeWithArray(tableName, dbName, partitionName, columnName);
+ LOG.debug("Done executing query for listPartitionColumnGrantsAll");
+ pm.retrieveAll(mSecurityTabPartList);
+ List<HiveObjectPrivilege> result = convertPartCols(mSecurityTabPartList);
+ success = commitTransaction();
+ LOG.debug("Done retrieving all objects for listPartitionColumnGrantsAll");
+ return result;
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ private List<HiveObjectPrivilege> convertPartCols(List<MPartitionColumnPrivilege> privs) {
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (MPartitionColumnPrivilege priv : privs) {
+ String pname = priv.getPrincipalName();
+ PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
+
+ MPartition mpartition = priv.getPartition();
+ MTable mtable = mpartition.getTable();
+ MDatabase mdatabase = mtable.getDatabase();
+
+ HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.COLUMN,
+ mdatabase.getName(), mtable.getTableName(), mpartition.getValues(), priv.getColumnName());
+ PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
+ priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
+
+ result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
+ }
+ return result;
+ }
+
@SuppressWarnings("unchecked")
private List<MTablePrivilege> listPrincipalAllTableGrants(
String principalName, PrincipalType principalType) {
@@ -4356,6 +4488,74 @@ public class ObjectStore implements RawS
return mSecurityTabPartList;
}
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalTableGrantsAll(
+ String principalName, PrincipalType principalType) {
+ boolean success = false;
+ try {
+ openTransaction();
+ LOG.debug("Executing listPrincipalAllTableGrants");
+ Query query = pm.newQuery(MTablePrivilege.class,
+ "principalName == t1 && principalType == t2");
+ query.declareParameters("java.lang.String t1, java.lang.String t2");
+ List<MTablePrivilege> mSecurityTabPartList = (List<MTablePrivilege>) query.execute(
+ principalName, principalType.toString());
+ LOG.debug("Done executing query for listPrincipalAllTableGrants");
+ pm.retrieveAll(mSecurityTabPartList);
+ List<HiveObjectPrivilege> result = convertTable(mSecurityTabPartList);
+ success = commitTransaction();
+ LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants");
+ return result;
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName) {
+ boolean success = false;
+ try {
+ openTransaction();
+ LOG.debug("Executing listTableGrantsAll");
+ Query query = pm.newQuery(MTablePrivilege.class,
+ "table.tableName == t1 && table.database.name == t2");
+ query.declareParameters("java.lang.String t1, java.lang.String t2");
+ List<MTablePrivilege> mSecurityTabPartList = (List<MTablePrivilege>)
+ query.executeWithArray(tableName, dbName);
+ LOG.debug("Done executing query for listTableGrantsAll");
+ pm.retrieveAll(mSecurityTabPartList);
+ List<HiveObjectPrivilege> result = convertTable(mSecurityTabPartList);
+ success = commitTransaction();
+ LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants");
+ return result;
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ private List<HiveObjectPrivilege> convertTable(List<MTablePrivilege> privs) {
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (MTablePrivilege priv : privs) {
+ String pname = priv.getPrincipalName();
+ PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
+
+ String table = priv.getTable().getTableName();
+ String database = priv.getTable().getDatabase().getName();
+
+ HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.TABLE, database, table,
+ null, null);
+ PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
+ priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
+
+ result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
+ }
+ return result;
+ }
+
@SuppressWarnings("unchecked")
private List<MPartitionPrivilege> listPrincipalAllPartitionGrants(
String principalName, PrincipalType principalType) {
@@ -4383,6 +4583,77 @@ public class ObjectStore implements RawS
return mSecurityTabPartList;
}
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll(
+ String principalName, PrincipalType principalType) {
+ boolean success = false;
+ try {
+ openTransaction();
+ LOG.debug("Executing listPrincipalPartitionGrantsAll");
+ Query query = pm.newQuery(MPartitionPrivilege.class,
+ "principalName == t1 && principalType == t2");
+ query.declareParameters("java.lang.String t1, java.lang.String t2");
+ List<MPartitionPrivilege> mSecurityTabPartList = (List<MPartitionPrivilege>)
+ query.execute(principalName, principalType.toString());
+ LOG.debug("Done executing query for listPrincipalPartitionGrantsAll");
+ pm.retrieveAll(mSecurityTabPartList);
+ List<HiveObjectPrivilege> result = convertPartition(mSecurityTabPartList);
+ success = commitTransaction();
+ LOG.debug("Done retrieving all objects for listPrincipalPartitionGrantsAll");
+ return result;
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPartitionGrantsAll(
+ String dbName, String tableName, String partitionName) {
+ boolean success = false;
+ try {
+ openTransaction();
+ LOG.debug("Executing listPrincipalPartitionGrantsAll");
+ Query query = pm.newQuery(MPartitionPrivilege.class,
+ "partition.table.tableName == t3 && partition.table.database.name == t4 && " +
+ "partition.partitionName == t5");
+ query.declareParameters("java.lang.String t3, java.lang.String t4, java.lang.String t5");
+ List<MPartitionPrivilege> mSecurityTabPartList = (List<MPartitionPrivilege>)
+ query.executeWithArray(tableName, dbName, partitionName);
+ LOG.debug("Done executing query for listPrincipalPartitionGrantsAll");
+ pm.retrieveAll(mSecurityTabPartList);
+ List<HiveObjectPrivilege> result = convertPartition(mSecurityTabPartList);
+ success = commitTransaction();
+ LOG.debug("Done retrieving all objects for listPrincipalPartitionGrantsAll");
+ return result;
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ private List<HiveObjectPrivilege> convertPartition(List<MPartitionPrivilege> privs) {
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (MPartitionPrivilege priv : privs) {
+ String pname = priv.getPrincipalName();
+ PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
+
+ MPartition mpartition = priv.getPartition();
+ MTable mtable = mpartition.getTable();
+ MDatabase mdatabase = mtable.getDatabase();
+
+ HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.PARTITION,
+ mdatabase.getName(), mtable.getTableName(), mpartition.getValues(), null);
+ PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
+ priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
+
+ result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
+ }
+ return result;
+ }
+
@SuppressWarnings("unchecked")
private List<MTableColumnPrivilege> listPrincipalAllTableColumnGrants(
String principalName, PrincipalType principalType) {
@@ -4409,6 +4680,75 @@ public class ObjectStore implements RawS
return mSecurityColumnList;
}
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll(
+ String principalName, PrincipalType principalType) {
+ boolean success = false;
+ try {
+ openTransaction();
+ LOG.debug("Executing listPrincipalTableColumnGrantsAll");
+ Query query = pm.newQuery(MTableColumnPrivilege.class,
+ "principalName == t1 && principalType == t2");
+ query.declareParameters("java.lang.String t1, java.lang.String t2");
+ List<MTableColumnPrivilege> mSecurityTabPartList = (List<MTableColumnPrivilege>)
+ query.execute(principalName, principalType.toString());
+ LOG.debug("Done executing query for listPrincipalTableColumnGrantsAll");
+ pm.retrieveAll(mSecurityTabPartList);
+ List<HiveObjectPrivilege> result = convertTableCols(mSecurityTabPartList);
+ success = commitTransaction();
+ LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrantsAll");
+ return result;
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listTableColumnGrantsAll(
+ String dbName, String tableName, String columnName) {
+ boolean success = false;
+ try {
+ openTransaction();
+ LOG.debug("Executing listPrincipalTableColumnGrantsAll");
+ Query query = pm.newQuery(MTableColumnPrivilege.class,
+ "table.tableName == t3 && table.database.name == t4 && columnName == t5");
+ query.declareParameters("java.lang.String t3, java.lang.String t4, java.lang.String t5");
+ List<MTableColumnPrivilege> mSecurityTabPartList = (List<MTableColumnPrivilege>)
+ query.executeWithArray(tableName, dbName, columnName);
+ LOG.debug("Done executing query for listPrincipalTableColumnGrantsAll");
+ pm.retrieveAll(mSecurityTabPartList);
+ List<HiveObjectPrivilege> result = convertTableCols(mSecurityTabPartList);
+ success = commitTransaction();
+ LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrantsAll");
+ return result;
+ } finally {
+ if (!success) {
+ rollbackTransaction();
+ }
+ }
+ }
+
+ private List<HiveObjectPrivilege> convertTableCols(List<MTableColumnPrivilege> privs) {
+ List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
+ for (MTableColumnPrivilege priv : privs) {
+ String pname = priv.getPrincipalName();
+ PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
+
+ MTable mtable = priv.getTable();
+ MDatabase mdatabase = mtable.getDatabase();
+
+ HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.COLUMN,
+ mdatabase.getName(), mtable.getTableName(), null, priv.getColumnName());
+ PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
+ priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
+
+ result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
+ }
+ return result;
+ }
+
@SuppressWarnings("unchecked")
private List<MPartitionColumnPrivilege> listPrincipalAllPartitionColumnGrants(
String principalName, PrincipalType principalType) {
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java Thu Jan 30 20:02:00 2014
@@ -29,6 +29,7 @@ import java.util.Set;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
@@ -428,28 +429,58 @@ public interface RawStore extends Config
String colName)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException;
- public abstract long cleanupEvents();
+ public abstract long cleanupEvents();
- public abstract boolean addToken(String tokenIdentifier, String delegationToken);
+ public abstract boolean addToken(String tokenIdentifier, String delegationToken);
- public abstract boolean removeToken(String tokenIdentifier);
+ public abstract boolean removeToken(String tokenIdentifier);
- public abstract String getToken(String tokenIdentifier);
+ public abstract String getToken(String tokenIdentifier);
- public abstract List<String> getAllTokenIdentifiers();
+ public abstract List<String> getAllTokenIdentifiers();
- public abstract int addMasterKey(String key) throws MetaException;
+ public abstract int addMasterKey(String key) throws MetaException;
- public abstract void updateMasterKey(Integer seqNo, String key)
+ public abstract void updateMasterKey(Integer seqNo, String key)
throws NoSuchObjectException, MetaException;
- public abstract boolean removeMasterKey(Integer keySeq);
+ public abstract boolean removeMasterKey(Integer keySeq);
- public abstract String[] getMasterKeys();
+ public abstract String[] getMasterKeys();
- public abstract void verifySchema() throws MetaException;
+ public abstract void verifySchema() throws MetaException;
- public abstract String getMetaStoreSchemaVersion() throws MetaException;
+ public abstract String getMetaStoreSchemaVersion() throws MetaException;
- public abstract void setMetaStoreSchemaVersion(String version, String comment) throws MetaException;
+ public abstract void setMetaStoreSchemaVersion(String version, String comment) throws MetaException;
+
+ List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
+ String principalName, PrincipalType principalType);
+
+ List<HiveObjectPrivilege> listPrincipalTableGrantsAll(
+ String principalName, PrincipalType principalType);
+
+ List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll(
+ String principalName, PrincipalType principalType);
+
+ List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll(
+ String principalName, PrincipalType principalType);
+
+ List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll(
+ String principalName, PrincipalType principalType);
+
+ List<HiveObjectPrivilege> listGlobalGrantsAll();
+
+ List<HiveObjectPrivilege> listDBGrantsAll(String dbName);
+
+ List<HiveObjectPrivilege> listPartitionColumnGrantsAll(
+ String dbName, String tableName, String partitionName, String columnName);
+
+ List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName);
+
+ List<HiveObjectPrivilege> listPartitionGrantsAll(
+ String dbName, String tableName, String partitionName);
+
+ List<HiveObjectPrivilege> listTableColumnGrantsAll(
+ String dbName, String tableName, String columnName);
}
Added: hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPrincipalDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPrincipalDesc.java?rev=1562939&view=auto
==============================================================================
--- hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPrincipalDesc.java (added)
+++ hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPrincipalDesc.java Thu Jan 30 20:02:00 2014
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.model;
+
+public class MPrincipalDesc {
+
+ private String name;
+ private String type;
+
+ public MPrincipalDesc() {}
+
+ public MPrincipalDesc(String name, String type) {
+ this.name = name;
+ this.type = type;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ @Override
+ public int hashCode() {
+ return type.hashCode() + name.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object object) {
+ MPrincipalDesc another = (MPrincipalDesc) object;
+ return type.equals(another.type) && name.equals(another.name);
+ }
+}
Modified: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java (original)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java Thu Jan 30 20:02:00 2014
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configurab
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
@@ -493,6 +494,69 @@ public class DummyRawStoreControlledComm
}
@Override
+ public List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return objectStore.listPrincipalDBGrantsAll(principalName, principalType);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalTableGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return objectStore.listPrincipalTableGrantsAll(principalName, principalType);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return objectStore.listPrincipalPartitionGrantsAll(principalName, principalType);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return objectStore.listPrincipalTableColumnGrantsAll(principalName, principalType);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return objectStore.listPrincipalPartitionColumnGrantsAll(principalName, principalType);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listGlobalGrantsAll() {
+ return objectStore.listGlobalGrantsAll();
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listDBGrantsAll(String dbName) {
+ return objectStore.listDBGrantsAll(dbName);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPartitionColumnGrantsAll(String dbName, String tableName,
+ String partitionName, String columnName) {
+ return objectStore.listPartitionColumnGrantsAll(dbName, tableName, partitionName, columnName);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName) {
+ return objectStore.listTableGrantsAll(dbName, tableName);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPartitionGrantsAll(String dbName, String tableName,
+ String partitionName) {
+ return objectStore.listPartitionGrantsAll(dbName, tableName, partitionName);
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listTableColumnGrantsAll(String dbName, String tableName,
+ String columnName) {
+ return objectStore.listTableColumnGrantsAll(dbName, tableName, columnName);
+ }
+
+ @Override
public ColumnStatistics getTableColumnStatistics(String dbName, String tableName,
List<String> colNames) throws MetaException, NoSuchObjectException {
return objectStore.getTableColumnStatistics(dbName, tableName, colNames);
Modified: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java (original)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java Thu Jan 30 20:02:00 2014
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
@@ -562,6 +563,66 @@ public class DummyRawStoreForJdoConnecti
}
@Override
+ public List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalTableGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll(
+ String principalName, PrincipalType principalType) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listGlobalGrantsAll() {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listDBGrantsAll(String dbName) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPartitionColumnGrantsAll(String dbName, String tableName, String partitionName, String columnName) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listPartitionGrantsAll(String dbName, String tableName, String partitionName) {
+ return null;
+ }
+
+ @Override
+ public List<HiveObjectPrivilege> listTableColumnGrantsAll(String dbName, String tableName, String columnName) {
+ return null;
+ }
+
+ @Override
public ColumnStatistics getTableColumnStatistics(String dbName, String tableName,
List<String> colName) throws MetaException, NoSuchObjectException {
return null;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Thu Jan 30 20:02:00 2014
@@ -547,18 +547,18 @@ public class DDLTask extends Task<DDLWor
if(SessionState.get().isAuthorizationModeV2()){
return showGrantsV2(showGrantDesc);
}
- StringBuilder builder = new StringBuilder();
+
+ PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
+ PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
+ String principalName = principalDesc == null ? null : principalDesc.getName();
+ PrincipalType type = principalDesc == null ? null : principalDesc.getType();
+ List<HiveObjectPrivilege> privs = new ArrayList<HiveObjectPrivilege>();
try {
- PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
- PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
- String principalName = principalDesc.getName();
if (hiveObjectDesc == null) {
- //show all privileges for this user
- List<HiveObjectPrivilege> users = db.showPrivilegeGrant(
- HiveObjectType.GLOBAL, principalName, principalDesc.getType(),
- null, null, null, null);
- writeGrantInfo(builder, principalDesc.getType(), principalName,
- null, null, null, null, users);
+ privs.addAll(db.showPrivilegeGrant(HiveObjectType.GLOBAL, principalName, type,
+ null, null, null, null));
+ } else if (hiveObjectDesc != null && hiveObjectDesc.getObject() == null) {
+ privs.addAll(db.showPrivilegeGrant(null, principalName, type, null, null, null, null));
} else {
String obj = hiveObjectDesc.getObject();
boolean notFound = true;
@@ -598,40 +598,31 @@ public class DDLTask extends Task<DDLWor
if (!hiveObjectDesc.getTable()) {
// show database level privileges
- List<HiveObjectPrivilege> dbs = db.showPrivilegeGrant(HiveObjectType.DATABASE, principalName,
- principalDesc.getType(), dbName, null, null, null);
- writeGrantInfo(builder, principalDesc.getType(), principalName,
- dbName, null, null, null, dbs);
+ privs.addAll(db.showPrivilegeGrant(HiveObjectType.DATABASE,
+ principalName, type, dbName, null, null, null));
} else {
if (showGrantDesc.getColumns() != null) {
// show column level privileges
for (String columnName : showGrantDesc.getColumns()) {
- List<HiveObjectPrivilege> columnss = db.showPrivilegeGrant(
+ privs.addAll(db.showPrivilegeGrant(
HiveObjectType.COLUMN, principalName,
- principalDesc.getType(), dbName, tableName, partValues,
- columnName);
- writeGrantInfo(builder, principalDesc.getType(),
- principalName, dbName, tableName, partName, columnName,
- columnss);
+ type, dbName, tableName, partValues,
+ columnName));
}
} else if (hiveObjectDesc.getPartSpec() != null) {
// show partition level privileges
- List<HiveObjectPrivilege> parts = db.showPrivilegeGrant(
- HiveObjectType.PARTITION, principalName, principalDesc
- .getType(), dbName, tableName, partValues, null);
- writeGrantInfo(builder, principalDesc.getType(),
- principalName, dbName, tableName, partName, null, parts);
+ privs.addAll(db.showPrivilegeGrant(
+ HiveObjectType.PARTITION, principalName, type,
+ dbName, tableName, partValues, null));
} else {
// show table level privileges
- List<HiveObjectPrivilege> tbls = db.showPrivilegeGrant(
- HiveObjectType.TABLE, principalName, principalDesc.getType(),
- dbName, tableName, null, null);
- writeGrantInfo(builder, principalDesc.getType(),
- principalName, dbName, tableName, null, null, tbls);
+ privs.addAll(db.showPrivilegeGrant(
+ HiveObjectType.TABLE, principalName, type,
+ dbName, tableName, null, null));
}
}
}
- writeToFile(builder.toString(), showGrantDesc.getResFile());
+ writeToFile(writeGrantInfo(privs), showGrantDesc.getResFile());
} catch (FileNotFoundException e) {
LOG.info("show table status: " + stringifyException(e));
return 1;
@@ -647,12 +638,12 @@ public class DDLTask extends Task<DDLWor
private int showGrantsV2(ShowGrantDesc showGrantDesc) throws HiveException {
HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
- StringBuilder builder = new StringBuilder();
try {
List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
getHivePrincipal(showGrantDesc.getPrincipalDesc()),
getHivePrivilegeObject(showGrantDesc.getHiveObj())
);
+ List<HiveObjectPrivilege> privList = new ArrayList<HiveObjectPrivilege>();
for(HivePrivilegeInfo privInfo : privInfos){
HivePrincipal principal = privInfo.getPrincipal();
HivePrivilegeObject privObj = privInfo.getObject();
@@ -663,20 +654,13 @@ public class DDLTask extends Task<DDLWor
privInfo.isGrantOption());
//only grantInfo is used
- HiveObjectPrivilege thriftObjectPriv = new HiveObjectPrivilege(null, null, null, grantInfo);
- List<HiveObjectPrivilege> privList = new ArrayList<HiveObjectPrivilege>();
+ HiveObjectPrivilege thriftObjectPriv = new HiveObjectPrivilege(new HiveObjectRef(
+ AuthorizationUtils.getThriftHiveObjType(privObj.getType()),privObj.getDbname(),
+ privObj.getTableviewname(),null,null), principal.getName(),
+ AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
privList.add(thriftObjectPriv);
- writeGrantInfo(builder,
- AuthorizationUtils.getThriftPrincipalType(principal.getType()),
- principal.getName(),
- privObj.getDbname(),
- privObj.getTableviewname(),
- null,
- null,
- privList
- );
}
- writeToFile(builder.toString(), showGrantDesc.getResFile());
+ writeToFile(writeGrantInfo(privList), showGrantDesc.getResFile());
} catch (IOException e) {
throw new HiveException("Error in show grant statement", e);
}
@@ -3076,48 +3060,55 @@ public class DDLTask extends Task<DDLWor
}
}
- public static void writeGrantInfo(StringBuilder builder,
- PrincipalType principalType, String principalName, String dbName,
- String tableName, String partName, String columnName,
- List<HiveObjectPrivilege> privileges) throws IOException {
+ static String writeGrantInfo(List<HiveObjectPrivilege> privileges) {
if (privileges == null || privileges.isEmpty()) {
- return;
+ return "";
}
- sortPrivileges(privileges);
-
+ StringBuilder builder = new StringBuilder();
for (HiveObjectPrivilege privilege : privileges) {
PrivilegeGrantInfo grantInfo = privilege.getGrantInfo();
+ HiveObjectRef resource = privilege.getHiveObject();
String privName = grantInfo.getPrivilege();
long unixTimestamp = grantInfo.getCreateTime() * 1000L;
Date createTime = new Date(unixTimestamp);
String grantor = grantInfo.getGrantor();
- if (dbName != null) {
- writeKeyValuePair(builder, "database", dbName);
- }
- if (tableName != null) {
- writeKeyValuePair(builder, "table", tableName);
- }
- if (partName != null) {
- writeKeyValuePair(builder, "partition", partName);
- }
- if (columnName != null) {
- writeKeyValuePair(builder, "columnName", columnName);
+ switch (resource.getObjectType()) {
+ case DATABASE:
+ writeKeyValuePair(builder, "database", resource.getDbName());
+ break;
+ case TABLE:
+ writeKeyValuePair(builder, "database", resource.getDbName());
+ writeKeyValuePair(builder, "table", resource.getObjectName());
+ break;
+ case PARTITION:
+ writeKeyValuePair(builder, "database", resource.getDbName());
+ writeKeyValuePair(builder, "table", resource.getObjectName());
+ writeKeyValuePair(builder, "partition", String.valueOf(resource.getPartValues()));
+ break;
+ case COLUMN:
+ writeKeyValuePair(builder, "database", resource.getDbName());
+ writeKeyValuePair(builder, "table", resource.getObjectName());
+ if (resource.getPartValues() != null && !resource.getPartValues().isEmpty()) {
+ writeKeyValuePair(builder, "partition", String.valueOf(resource.getPartValues()));
+ }
+ writeKeyValuePair(builder, "columnName", resource.getColumnName());
+ break;
}
- writeKeyValuePair(builder, "principalName", principalName);
- writeKeyValuePair(builder, "principalType", "" + principalType);
+ writeKeyValuePair(builder, "principalName", privilege.getPrincipalName());
+ writeKeyValuePair(builder, "principalType", "" + privilege.getPrincipalType());
writeKeyValuePair(builder, "privilege", privName);
writeKeyValuePair(builder, "grantTime", "" + createTime);
if (grantor != null) {
writeKeyValuePair(builder, "grantor", grantor);
}
}
+ return builder.toString();
}
- private static void writeKeyValuePair(StringBuilder builder, String key,
- String value) throws IOException {
+ private static void writeKeyValuePair(StringBuilder builder, String key, String value) {
if (builder.length() > 0) {
builder.append((char)terminator);
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Thu Jan 30 20:02:00 2014
@@ -261,6 +261,7 @@ TOK_PRINCIPAL_NAME;
TOK_USER;
TOK_GROUP;
TOK_ROLE;
+TOK_RESOURCE_ALL;
TOK_GRANT_WITH_OPTION;
TOK_GRANT_WITH_ADMIN_OPTION;
TOK_PRIV_ALL;
@@ -1375,14 +1376,15 @@ showRoles
showGrants
@init {pushMsg("show grants", state);}
@after {popMsg(state);}
- : KW_SHOW KW_GRANT principalName privilegeIncludeColObject?
- -> ^(TOK_SHOW_GRANT principalName privilegeIncludeColObject?)
+ : KW_SHOW KW_GRANT principalName? (KW_ON privilegeIncludeColObject)?
+ -> ^(TOK_SHOW_GRANT principalName? privilegeIncludeColObject?)
;
privilegeIncludeColObject
@init {pushMsg("privilege object including columns", state);}
@after {popMsg(state);}
- : KW_ON privObjectType identifier (LPAREN cols=columnNameList RPAREN)? partitionSpec?
+ : KW_ALL -> ^(TOK_RESOURCE_ALL)
+ | privObjectType identifier (LPAREN cols=columnNameList RPAREN)? partitionSpec?
-> ^(TOK_PRIV_OBJECT_COL identifier privObjectType $cols? partitionSpec?)
;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java Thu Jan 30 20:02:00 2014
@@ -154,59 +154,74 @@ public class HiveAuthorizationTaskFactor
@Override
public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) throws SemanticException {
+
+ PrincipalDesc principalDesc = null;
PrivilegeObjectDesc privHiveObj = null;
+ List<String> cols = null;
- ASTNode principal = (ASTNode) ast.getChild(0);
- PrincipalType type = PrincipalType.USER;
- switch (principal.getType()) {
- case HiveParser.TOK_USER:
- type = PrincipalType.USER;
- break;
- case HiveParser.TOK_GROUP:
- type = PrincipalType.GROUP;
- break;
- case HiveParser.TOK_ROLE:
- type = PrincipalType.ROLE;
- break;
+ ASTNode param = null;
+ if (ast.getChildCount() > 0) {
+ param = (ASTNode) ast.getChild(0);
+ principalDesc = getPrincipalDesc(param);
+ if (principalDesc != null) {
+ param = (ASTNode) ast.getChild(1); // shift one
+ }
}
- String principalName = BaseSemanticAnalyzer.unescapeIdentifier(principal.getChild(0).getText());
- PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
- List<String> cols = null;
- if (ast.getChildCount() > 1) {
- ASTNode child = (ASTNode) ast.getChild(1);
- if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
+ if (param != null) {
+ if (param.getType() == HiveParser.TOK_RESOURCE_ALL) {
+ privHiveObj = new PrivilegeObjectDesc();
+ } else if (param.getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
privHiveObj = new PrivilegeObjectDesc();
//set object name
- privHiveObj.setObject(BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText()));
+ String text = param.getChild(0).getText();
+ privHiveObj.setObject(BaseSemanticAnalyzer.unescapeIdentifier(text));
//set object type
- ASTNode objTypeNode = (ASTNode) child.getChild(1);
+ ASTNode objTypeNode = (ASTNode) param.getChild(1);
privHiveObj.setTable(objTypeNode.getToken().getType() == HiveParser.TOK_TABLE_TYPE);
//set col and partition spec if specified
- if (child.getChildCount() > 2) {
- for (int i = 2; i < child.getChildCount(); i++) {
- ASTNode grandChild = (ASTNode) child.getChild(i);
- if (grandChild.getToken().getType() == HiveParser.TOK_PARTSPEC) {
- privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(grandChild));
- } else if (grandChild.getToken().getType() == HiveParser.TOK_TABCOLNAME) {
- cols = BaseSemanticAnalyzer.getColumnNames(grandChild);
- }
+ for (int i = 2; i < param.getChildCount(); i++) {
+ ASTNode partOrCol = (ASTNode) param.getChild(i);
+ if (partOrCol.getType() == HiveParser.TOK_PARTSPEC) {
+ privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(partOrCol));
+ } else if (partOrCol.getType() == HiveParser.TOK_TABCOLNAME) {
+ cols = BaseSemanticAnalyzer.getColumnNames(partOrCol);
+ } else {
+ throw new SemanticException("Invalid token type " + partOrCol.getType());
}
}
}
}
- if (privHiveObj == null && cols != null) {
- throw new SemanticException(
- "For user-level privileges, column sets should be null. columns="
- + cols.toString());
- }
-
ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(),
principalDesc, privHiveObj, cols);
return TaskFactory.get(new DDLWork(inputs, outputs, showGrant), conf);
}
+
+ private PrincipalDesc getPrincipalDesc(ASTNode principal) {
+ PrincipalType type = getPrincipalType(principal);
+ if (type != null) {
+ String text = principal.getChild(0).getText();
+ String principalName = BaseSemanticAnalyzer.unescapeIdentifier(text);
+ return new PrincipalDesc(principalName, type);
+ }
+ return null;
+ }
+
+ private PrincipalType getPrincipalType(ASTNode principal) {
+ switch (principal.getType()) {
+ case HiveParser.TOK_USER:
+ return PrincipalType.USER;
+ case HiveParser.TOK_GROUP:
+ return PrincipalType.GROUP;
+ case HiveParser.TOK_ROLE:
+ return PrincipalType.ROLE;
+ default:
+ return null;
+ }
+ }
+
@Override
public Task<? extends Serializable> createRevokeRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) {
Added: hive/trunk/ql/src/test/queries/clientpositive/authorization_9.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/authorization_9.q?rev=1562939&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/authorization_9.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/authorization_9.q Thu Jan 30 20:02:00 2014
@@ -0,0 +1,11 @@
+-- SORT_BEFORE_DIFF
+
+create table dummy (key string, value string);
+
+grant select on database default to user hive_test_user;
+grant select on table dummy to user hive_test_user;
+grant select (key, value) on table dummy to user hive_test_user;
+
+show grant user hive_test_user on database default;
+show grant user hive_test_user on table dummy;
+show grant user hive_test_user on all;
Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out Thu Jan 30 20:02:00 2014
@@ -58,7 +58,7 @@ POSTHOOK: query: show grant user hive_te
POSTHOOK: type: SHOW_GRANT
database default
table authorization_fail_4
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Alter
@@ -66,7 +66,7 @@ privilege Alter
grantor hive_test_user
database default
table authorization_fail_4
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Create
Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out Thu Jan 30 20:02:00 2014
@@ -71,7 +71,7 @@ POSTHOOK: query: show grant user hive_te
POSTHOOK: type: SHOW_GRANT
database default
table authorization_fail
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Alter
@@ -79,7 +79,7 @@ privilege Alter
grantor hive_test_user
database default
table authorization_fail
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Create
@@ -87,7 +87,7 @@ privilege Create
grantor hive_test_user
database default
table authorization_fail
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Select
@@ -105,7 +105,7 @@ POSTHOOK: query: show grant user hive_te
POSTHOOK: type: SHOW_GRANT
database default
table authorization_fail
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Alter
@@ -113,7 +113,7 @@ privilege Alter
grantor hive_test_user
database default
table authorization_fail
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Create
Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out Thu Jan 30 20:02:00 2014
@@ -91,7 +91,7 @@ POSTHOOK: Lineage: authorization_part_fa
POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part_fail
-partition ds=2010
+partition [2010]
columnName key
principalName hive_test_group1
principalType GROUP
@@ -106,7 +106,7 @@ POSTHOOK: Lineage: authorization_part_fa
POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part_fail
-partition ds=2010
+partition [2010]
principalName hive_test_group1
principalType GROUP
privilege Select
@@ -166,7 +166,7 @@ POSTHOOK: Lineage: authorization_part_fa
POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part_fail
-partition ds=2011
+partition [2011]
columnName key
principalName hive_test_group1
principalType GROUP
@@ -183,7 +183,7 @@ POSTHOOK: Lineage: authorization_part_fa
POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part_fail
-partition ds=2011
+partition [2011]
principalName hive_test_group1
principalType GROUP
privilege Select
Modified: hive/trunk/ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_rename_partition_authorization.q.out Thu Jan 30 20:02:00 2014
@@ -97,7 +97,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
columnName key
principalName hive_test_user
principalType USER
@@ -124,7 +124,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010_tmp
+partition [2010_tmp]
columnName key
principalName hive_test_user
principalType USER
Modified: hive/trunk/ql/src/test/results/clientpositive/authorization_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/authorization_2.q.out?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/authorization_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/authorization_2.q.out Thu Jan 30 20:02:00 2014
@@ -86,7 +86,7 @@ POSTHOOK: query: show grant user hive_te
POSTHOOK: type: SHOW_GRANT
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Create
@@ -94,7 +94,7 @@ privilege Create
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Drop
@@ -102,7 +102,7 @@ privilege Drop
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Update
@@ -140,7 +140,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
columnName key
principalName hive_test_user
principalType USER
@@ -215,7 +215,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
columnName key
principalName hive_test_user
principalType USER
@@ -324,7 +324,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Create
@@ -332,7 +332,7 @@ privilege Create
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Drop
@@ -340,7 +340,7 @@ privilege Drop
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Update
@@ -386,7 +386,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Create
@@ -394,7 +394,7 @@ privilege Create
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Drop
@@ -402,7 +402,7 @@ privilege Drop
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Select
@@ -410,7 +410,7 @@ privilege Select
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Update
@@ -442,14 +442,14 @@ database default
table authorization_part
principalName hive_test_user
principalType USER
-privilege Select
+privilege Update
#### A masked pattern was here ####
grantor hive_test_user
database default
table authorization_part
principalName hive_test_user
principalType USER
-privilege Update
+privilege Select
#### A masked pattern was here ####
grantor hive_test_user
PREHOOK: query: select key from authorization_part where ds='2010' order by key limit 20
@@ -535,7 +535,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Create
@@ -543,7 +543,7 @@ privilege Create
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Drop
@@ -551,7 +551,7 @@ privilege Drop
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Select
@@ -559,7 +559,7 @@ privilege Select
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Update
@@ -619,7 +619,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Create
@@ -627,7 +627,7 @@ privilege Create
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Drop
@@ -635,7 +635,7 @@ privilege Drop
grantor hive_test_user
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_user
principalType USER
privilege Update
@@ -732,7 +732,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
columnName key
principalName hive_test_group1
principalType GROUP
@@ -827,7 +827,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
columnName key
principalName hive_test_group1
principalType GROUP
@@ -997,7 +997,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_group1
principalType GROUP
privilege Select
@@ -1100,7 +1100,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2010).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
principalName hive_test_group1
principalType GROUP
privilege Select
Modified: hive/trunk/ql/src/test/results/clientpositive/authorization_6.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/authorization_6.q.out?rev=1562939&r1=1562938&r2=1562939&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/authorization_6.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/authorization_6.q.out Thu Jan 30 20:02:00 2014
@@ -111,7 +111,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2011).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2010
+partition [2010]
columnName key
principalName hive_test_user
principalType USER
@@ -128,7 +128,7 @@ POSTHOOK: Lineage: authorization_part PA
POSTHOOK: Lineage: authorization_part PARTITION(ds=2011).value SIMPLE [(src_auth_tmp)src_auth_tmp.FieldSchema(name:value, type:string, comment:null), ]
database default
table authorization_part
-partition ds=2011
+partition [2011]
columnName key
principalName hive_test_user
principalType USER
Added: hive/trunk/ql/src/test/results/clientpositive/authorization_9.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/authorization_9.q.out?rev=1562939&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/authorization_9.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/authorization_9.q.out Thu Jan 30 20:02:00 2014
@@ -0,0 +1,79 @@
+PREHOOK: query: -- SORT_BEFORE_DIFF
+
+create table dummy (key string, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- SORT_BEFORE_DIFF
+
+create table dummy (key string, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dummy
+PREHOOK: query: grant select on database default to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: query: grant select on database default to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+PREHOOK: query: grant select on table dummy to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@dummy
+POSTHOOK: query: grant select on table dummy to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@dummy
+PREHOOK: query: grant select (key, value) on table dummy to user hive_test_user
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@dummy
+POSTHOOK: query: grant select (key, value) on table dummy to user hive_test_user
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@dummy
+PREHOOK: query: show grant user hive_test_user on database default
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on database default
+POSTHOOK: type: SHOW_GRANT
+database default
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+PREHOOK: query: show grant user hive_test_user on table dummy
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on table dummy
+POSTHOOK: type: SHOW_GRANT
+database default
+table dummy
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+PREHOOK: query: show grant user hive_test_user on all
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: show grant user hive_test_user on all
+POSTHOOK: type: SHOW_GRANT
+database default
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+database default
+table dummy
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+database default
+table dummy
+columnName key
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user
+database default
+table dummy
+columnName value
+principalName hive_test_user
+principalType USER
+privilege Select
+#### A masked pattern was here ####
+grantor hive_test_user