You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@sentry.apache.org by sp...@apache.org on 2017/09/21 21:58:27 UTC
[2/2] sentry git commit: SENTRY-1958: Bump to Hive version 2.0
(Sergio Pena, reviewed by Alexander Kolbasov)
SENTRY-1958: Bump to Hive version 2.0 (Sergio Pena, reviewed by Alexander Kolbasov)
Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/da1863f3
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/da1863f3
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/da1863f3
Branch: refs/heads/master
Commit: da1863f3479026ee2cccade53ef422c2ccc9ff91
Parents: 5968419
Author: Sergio Pena <se...@cloudera.com>
Authored: Thu Sep 21 16:57:47 2017 -0500
Committer: Sergio Pena <se...@cloudera.com>
Committed: Thu Sep 21 16:57:47 2017 -0500
----------------------------------------------------------------------
dev-support/test-patch.py | 2 +-
pom.xml | 15 +-
.../sentry-binding-hive-follower-v2/pom.xml | 6 -
.../sentry-binding-hive-follower/pom.xml | 6 -
.../json/SentryJSONAlterPartitionMessage.java | 9 +-
.../json/SentryJSONDropPartitionMessage.java | 1 +
.../json/SentryJSONMessageDeserializer.java | 16 ++
.../json/SentryJSONMessageFactory.java | 118 ++++++-----
sentry-binding/sentry-binding-hive-v2/pom.xml | 7 -
sentry-binding/sentry-binding-hive/pom.xml | 6 -
.../hive/ql/exec/SentryGrantRevokeTask.java | 7 +-
.../hive/HiveAuthzBindingSessionHook.java | 37 +---
.../hive/authz/HiveAuthzPrivilegesMap.java | 2 -
.../hive/authz/SentryHiveAuthorizerFactory.java | 45 +++++
.../hive/authz/SentryHiveAuthorizerImpl.java | 196 +++++++++++++++++++
.../metastore/AuthorizingObjectStore.java | 2 +-
.../metastore/AuthorizingObjectStoreBase.java | 2 +-
.../metastore/MetastoreAuthzBindingBase.java | 22 ++-
.../SentryMetastorePostEventListener.java | 17 +-
.../SentryMetastorePostEventListenerBase.java | 18 +-
...tastorePostEventListenerNotificationLog.java | 7 +-
.../TestSentryHiveAuthorizationTaskFactory.java | 1 +
.../org/apache/sentry/binding/hive/TestURI.java | 1 +
.../src/test/resources/log4j2.properties | 50 +++++
sentry-provider/sentry-provider-db/pom.xml | 100 +++-------
.../thrift/SentryPolicyStoreProcessor.java | 7 +-
sentry-tests/sentry-tests-hive/pom.xml | 19 ++
.../tests/e2e/dbprovider/TestDbDDLAuditLog.java | 2 +-
.../e2e/hdfs/TestHDFSIntegrationAdvanced.java | 2 +-
.../tests/e2e/hdfs/TestHDFSIntegrationBase.java | 14 ++
.../e2e/hdfs/TestHDFSIntegrationEnd2End.java | 3 +
.../AbstractTestWithStaticConfiguration.java | 1 +
.../e2e/hive/TestMetadataObjectRetrieval.java | 2 +-
.../tests/e2e/hive/TestOperationsPart1.java | 41 ++--
.../e2e/hive/TestPrivilegesAtFunctionScope.java | 30 ---
.../e2e/hive/hiveserver/HiveServerFactory.java | 23 ++-
...NotificationListenerInBuiltDeserializer.java | 4 +-
.../TestSentryListenerSentryDeserializer.java | 9 +-
.../src/test/resources/log4j2.properties | 53 +++++
sentry-tests/sentry-tests-kafka/pom.xml | 84 ++------
sentry-tests/sentry-tests-solr/pom.xml | 84 ++------
sentry-tests/sentry-tests-sqoop/pom.xml | 82 ++------
42 files changed, 663 insertions(+), 490 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/dev-support/test-patch.py
----------------------------------------------------------------------
diff --git a/dev-support/test-patch.py b/dev-support/test-patch.py
index ac91b59..e44be3a 100644
--- a/dev-support/test-patch.py
+++ b/dev-support/test-patch.py
@@ -319,7 +319,7 @@ else:
result.info("patch applied and built but tests did not execute")
if hive_authz2:
result.info("INFO: Test patch for Hive authz2")
- mvn_profile="-P-hive-authz1,hive-authz2,-datanucleus3,datanucleus4"
+ mvn_profile="-P-hive-authz1,hive-authz2"
output_dir_v2 = output_dir + "/v2"
os.mkdir(output_dir_v2)
mvn_clean(result, mvn_repo, output_dir_v2, mvn_profile)
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 53679f9..877a452 100644
--- a/pom.xml
+++ b/pom.xml
@@ -65,14 +65,13 @@ limitations under the License.
<commons.lang.version>2.6</commons.lang.version>
<commons.logging.version>1.2</commons.logging.version>
<curator.version>2.11.1</curator.version>
- <datanucleus.maven.plugin.version>4.0.1</datanucleus.maven.plugin.version>
<derby.version>10.10.2.0</derby.version>
<easymock.version>3.0</easymock.version>
<fest.reflect.version>1.4.1</fest.reflect.version>
<guava.version>14.0.1</guava.version>
<hadoop.version>2.7.2</hadoop.version>
<hamcrest.version>1.3</hamcrest.version>
- <hive.version>1.1.0</hive.version>
+ <hive.version>2.0.0</hive.version>
<jackson.version>1.8.8</jackson.version>
<jdo-api.version>3.0.1</jdo-api.version>
<jettyVersion>8.1.19.v20160209</jettyVersion>
@@ -97,7 +96,15 @@ limitations under the License.
<zookeeper.version>3.4.5</zookeeper.version>
<hadoop-aws.version>2.7.0</hadoop-aws.version>
<maven.jar.plugin.version>3.0.2</maven.jar.plugin.version>
- <datanucleus-core.version>3.2.12</datanucleus-core.version>
+
+ <!-- Datanucleus package versions -->
+ <datanucleus.maven.plugin.version>4.0.5</datanucleus.maven.plugin.version>
+ <datanucleus-core.version>4.1.17</datanucleus-core.version>
+ <datanucleus-rdbms.version>4.1.17</datanucleus-rdbms.version>
+ <datanucleus-api-jdo.version>4.2.5</datanucleus-api-jdo.version>
+
+ <!-- Datanucleus package used for the enhancer (newer versions fail building sentry) -->
+ <datanucleus-jdo.version>3.2.0-m3</datanucleus-jdo.version>
</properties>
<dependencyManagement>
@@ -804,7 +811,7 @@ limitations under the License.
<activeByDefault>true</activeByDefault>
</activation>
<properties>
- <hive.version>1.1.0</hive.version>
+ <hive.version>2.0.0</hive.version>
</properties>
<dependencies>
<dependency>
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive-follower-v2/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-follower-v2/pom.xml b/sentry-binding/sentry-binding-hive-follower-v2/pom.xml
index fa7e928..bf7977c 100644
--- a/sentry-binding/sentry-binding-hive-follower-v2/pom.xml
+++ b/sentry-binding/sentry-binding-hive-follower-v2/pom.xml
@@ -28,12 +28,6 @@ limitations under the License.
<artifactId>sentry-binding-hive-follower-v2</artifactId>
<name>Hive follower v2 for Sentry</name>
- <properties>
- <datanucleus-api-jdo.version>4.2.1</datanucleus-api-jdo.version>
- <datanucleus-core.version>4.1.6</datanucleus-core.version>
- <datanucleus-rdbms.version>4.1.7</datanucleus-rdbms.version>
- </properties>
-
<dependencies>
<dependency>
<groupId>org.datanucleus</groupId>
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive-follower/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-follower/pom.xml b/sentry-binding/sentry-binding-hive-follower/pom.xml
index e69519c..9480d52 100644
--- a/sentry-binding/sentry-binding-hive-follower/pom.xml
+++ b/sentry-binding/sentry-binding-hive-follower/pom.xml
@@ -28,12 +28,6 @@ limitations under the License.
<artifactId>sentry-binding-hive-follower</artifactId>
<name>Hive follower for Sentry</name>
- <properties>
- <datanucleus-api-jdo.version>3.2.6</datanucleus-api-jdo.version>
- <datanucleus-core.version>3.2.12</datanucleus-core.version>
- <datanucleus-rdbms.version>3.2.12</datanucleus-rdbms.version>
- </properties>
-
<dependencies>
<dependency>
<groupId>org.datanucleus</groupId>
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONAlterPartitionMessage.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONAlterPartitionMessage.java b/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONAlterPartitionMessage.java
index 1e636c9..25de808 100644
--- a/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONAlterPartitionMessage.java
+++ b/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONAlterPartitionMessage.java
@@ -19,12 +19,13 @@
package org.apache.sentry.binding.metastore.messaging.json;
import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
import java.util.Collections;
import org.apache.hive.hcatalog.messaging.json.JSONAlterPartitionMessage;
import org.codehaus.jackson.annotate.JsonProperty;
import java.util.List;
+import java.util.Map;
public class SentryJSONAlterPartitionMessage extends JSONAlterPartitionMessage {
@JsonProperty
@@ -35,12 +36,12 @@ public class SentryJSONAlterPartitionMessage extends JSONAlterPartitionMessage {
private List<String> newValues;
public SentryJSONAlterPartitionMessage() {
- super("", "", "", "", ImmutableList.<String>of(), null);
+ super("", "", "", "", ImmutableMap.<String, String>of(), null);
}
public SentryJSONAlterPartitionMessage(String server, String servicePrincipal,
String db, String table,
- List<String> values, List<String> newValues,
+ Map<String, String> values, List<String> newValues,
Long timestamp, String oldlocation,
String newLocation) {
super(server, servicePrincipal, db, table, values, timestamp);
@@ -55,7 +56,7 @@ public class SentryJSONAlterPartitionMessage extends JSONAlterPartitionMessage {
Long timestamp, String oldlocation,
String newLocation) {
this(server, servicePrincipal, db, table,
- Collections.<String>emptyList(), Collections.<String>emptyList(),
+ Collections.<String, String>emptyMap(), Collections.<String>emptyList(),
timestamp, oldlocation, newLocation);
}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONDropPartitionMessage.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONDropPartitionMessage.java b/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONDropPartitionMessage.java
index d3ebf60..d5f899c 100644
--- a/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONDropPartitionMessage.java
+++ b/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONDropPartitionMessage.java
@@ -47,4 +47,5 @@ public class SentryJSONDropPartitionMessage extends JSONDropPartitionMessage {
public String toString() {
return SentryJSONMessageDeserializer.serialize(this);
}
+
}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageDeserializer.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageDeserializer.java b/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageDeserializer.java
index cc0bbec..d11b261 100644
--- a/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageDeserializer.java
+++ b/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageDeserializer.java
@@ -19,6 +19,7 @@
package org.apache.sentry.binding.metastore.messaging.json;
import org.apache.hive.hcatalog.messaging.*;
+import org.apache.hive.hcatalog.messaging.json.JSONInsertMessage;
import org.codehaus.jackson.map.DeserializationConfig;
import org.codehaus.jackson.map.ObjectMapper;
@@ -128,6 +129,21 @@ public class SentryJSONMessageDeserializer extends MessageDeserializer {
}
}
+ /**
+ * Method to de-serialize JSONInsertMessage instance.
+ */
+ @Override
+ public InsertMessage getInsertMessage(String messageBody) {
+ // Sentry would not be interested in InsertMessage as these are generated when data is
+ // inserted. This method is implemented for completeness. This is the reason why a new Sentry
+ // JSON class is not defined for InsertMessage.
+ try {
+ return mapper.readValue(messageBody, JSONInsertMessage.class);
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Could not construct InsertMessage", e);
+ }
+ }
+
public static String serialize(Object object) {
try {
return mapper.writeValueAsString(object);
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageFactory.java b/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageFactory.java
index efdf8b8..b531976 100644
--- a/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageFactory.java
+++ b/sentry-binding/sentry-binding-hive-follower/src/main/java/org/apache/sentry/binding/metastore/messaging/json/SentryJSONMessageFactory.java
@@ -21,13 +21,11 @@ package org.apache.sentry.binding.metastore.messaging.json;
import com.google.common.collect.Lists;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.classification.InterfaceAudience;
-import org.apache.hadoop.hive.common.classification.InterfaceStability;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
import org.apache.hive.hcatalog.messaging.*;
+import org.apache.hive.hcatalog.messaging.json.JSONInsertMessage;
import java.util.*;
@@ -35,6 +33,22 @@ public class SentryJSONMessageFactory extends MessageFactory {
private static final Log LOG = LogFactory.getLog(SentryJSONMessageFactory.class.getName());
private static SentryJSONMessageDeserializer deserializer = new SentryJSONMessageDeserializer();
+ // This class has basic information from a Partition. It is used to get a list of Partition
+ // information from an iterator instead of having a list of Partition objects. Partition objects
+ // may hold more information that can cause memory issues if we get a large list.
+ private class PartitionBasicInfo {
+ private List<Map<String, String>> partitionList = Lists.newLinkedList();
+ private List<String> locations = Lists.newArrayList();
+
+ public List<Map<String, String>> getPartitionList() {
+ return partitionList;
+ }
+
+ public List<String> getLocations() {
+ return locations;
+ }
+ }
+
public SentryJSONMessageFactory() {
LOG.info("Using SentryJSONMessageFactory for building Notification log messages ");
}
@@ -76,52 +90,58 @@ public class SentryJSONMessageFactory extends MessageFactory {
table.getTableName(), now(), table.getSd().getLocation());
}
- public SentryJSONAddPartitionMessage buildAddPartitionMessage(Table table, List<Partition> partitions) {
- return new SentryJSONAddPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, table.getDbName(),
- table.getTableName(), getPartitionKeyValues(table, partitions), now(),
- getPartitionLocations(partitions));
+ @Override
+ public SentryJSONAlterPartitionMessage buildAlterPartitionMessage(Table table,
+ Partition before, Partition after) {
+ return new SentryJSONAlterPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL,
+ before.getDbName(), before.getTableName(), getPartitionKeyValues(table, before),
+ after.getValues(), now(), before.getSd().getLocation(), after.getSd().getLocation());
}
- private List<String> getPartitionLocations(List<Partition> partitions) {
- List<String> paths = Lists.newLinkedList();
- for (Partition partition : partitions) {
- paths.add(partition.getSd().getLocation());
- }
- return paths;
- }
+ @Override
+ public DropPartitionMessage buildDropPartitionMessage(Table table, Iterator<Partition> partitions) {
+ PartitionBasicInfo partitionBasicInfo = getPartitionBasicInfo(table, partitions);
- private List<String> getPartitionLocations(PartitionSpecProxy partitionSpec) {
- Iterator<Partition> iterator = partitionSpec.getPartitionIterator();
- List<String> locations = Lists.newLinkedList();
- while (iterator.hasNext()) {
- locations.add(iterator.next().getSd().getLocation());
- }
- return locations;
+ return new SentryJSONDropPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL,
+ table.getDbName(), table.getTableName(), partitionBasicInfo.getPartitionList(),
+ now(), partitionBasicInfo.getLocations());
}
- @InterfaceAudience.LimitedPrivate( {"Hive"})
- @InterfaceStability.Evolving
- public SentryJSONAddPartitionMessage buildAddPartitionMessage(Table table, PartitionSpecProxy partitionSpec) {
- return new SentryJSONAddPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, table.getDbName(),
- table.getTableName(), getPartitionKeyValues(table, partitionSpec), now(),
- getPartitionLocations(partitionSpec));
+ @Override
+ public InsertMessage buildInsertMessage(String db, String table, Map<String,String> partKeyVals,
+ List<String> files) {
+ // Sentry would not be interested in InsertMessage as these are generated when data is
+ // inserted. This method is implemented for completeness. This is the reason why a new Sentry
+ // JSON class is not defined for InsertMessage.
+ return new JSONInsertMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, db, table, partKeyVals,
+ files, now());
}
@Override
- public SentryJSONAlterPartitionMessage buildAlterPartitionMessage(Partition before, Partition after) {
- return new SentryJSONAlterPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, before.getDbName(),
- before.getTableName(), before.getValues(), after.getValues(), now(), before.getSd().getLocation(),
- after.getSd().getLocation());
+ public AddPartitionMessage buildAddPartitionMessage(Table table,
+ Iterator<Partition> partitionsIterator) {
+ PartitionBasicInfo partitionBasicInfo = getPartitionBasicInfo(table, partitionsIterator);
+
+ return new SentryJSONAddPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, table.getDbName(),
+ table.getTableName(), partitionBasicInfo.getPartitionList(), now(),
+ partitionBasicInfo.getLocations());
}
- public SentryJSONAlterPartitionMessage buildAlterPartitionMessage(Table table, Partition before, Partition after) {
- return buildAlterPartitionMessage(before, after);
+ public AddPartitionMessage buildAddPartitionMessage(Table table,
+ List<Partition> partitions) {
+ return buildAddPartitionMessage (table, partitions.iterator());
}
- public SentryJSONDropPartitionMessage buildDropPartitionMessage(Table table, Partition partition) {
- return new SentryJSONDropPartitionMessage(HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, partition.getDbName(),
- partition.getTableName(), Arrays.asList(getPartitionKeyValues(table, partition)),
- now(), Arrays.asList(partition.getSd().getLocation()));
+ private PartitionBasicInfo getPartitionBasicInfo(Table table, Iterator<Partition> iterator) {
+ PartitionBasicInfo partitionBasicInfo = new PartitionBasicInfo();
+ while(iterator.hasNext()) {
+ Partition partition = iterator.next();
+
+ partitionBasicInfo.getPartitionList().add(getPartitionKeyValues(table, partition));
+ partitionBasicInfo.getLocations().add(partition.getSd().getLocation());
+ }
+
+ return partitionBasicInfo;
}
private static Map<String, String> getPartitionKeyValues(Table table, Partition partition) {
@@ -134,30 +154,6 @@ public class SentryJSONMessageFactory extends MessageFactory {
return partitionKeys;
}
- private static List<Map<String, String>> getPartitionKeyValues(Table table, List<Partition> partitions) {
- List<Map<String, String>> partitionList = Lists.newLinkedList();
-
- for (Partition partition : partitions) {
- partitionList.add(getPartitionKeyValues(table, partition));
- }
-
- return partitionList;
- }
-
- @InterfaceAudience.LimitedPrivate( {"Hive"})
- @InterfaceStability.Evolving
- private static List<Map<String, String>> getPartitionKeyValues(Table table, PartitionSpecProxy partitionSpec) {
- ArrayList partitionList = new ArrayList();
- PartitionSpecProxy.PartitionIterator iterator = partitionSpec.getPartitionIterator();
-
- while (iterator.hasNext()) {
- Partition partition = iterator.next();
- partitionList.add(getPartitionKeyValues(table, partition));
- }
-
- return partitionList;
- }
-
//This is private in parent class
private long now() {
return System.currentTimeMillis() / 1000L;
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive-v2/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive-v2/pom.xml b/sentry-binding/sentry-binding-hive-v2/pom.xml
index 5f5cbf3..06e3df5 100644
--- a/sentry-binding/sentry-binding-hive-v2/pom.xml
+++ b/sentry-binding/sentry-binding-hive-v2/pom.xml
@@ -28,13 +28,6 @@ limitations under the License.
<artifactId>sentry-binding-hive-v2</artifactId>
<name>Sentry Binding v2 for Hive</name>
- <properties>
- <datanucleus-api-jdo.version>4.2.1</datanucleus-api-jdo.version>
- <datanucleus-core.version>4.1.6</datanucleus-core.version>
- <datanucleus-rdbms.version>4.1.7</datanucleus-rdbms.version>
- <datanucleus-jdo.version>3.2.0-m3</datanucleus-jdo.version>
- </properties>
-
<dependencies>
<dependency>
<groupId>org.apache.sentry</groupId>
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/pom.xml b/sentry-binding/sentry-binding-hive/pom.xml
index 92147e1..fc7a7ff 100644
--- a/sentry-binding/sentry-binding-hive/pom.xml
+++ b/sentry-binding/sentry-binding-hive/pom.xml
@@ -28,12 +28,6 @@ limitations under the License.
<artifactId>sentry-binding-hive</artifactId>
<name>Sentry Binding for Hive</name>
- <properties>
- <datanucleus-api-jdo.version>3.2.6</datanucleus-api-jdo.version>
- <datanucleus-core.version>3.2.12</datanucleus-core.version>
- <datanucleus-rdbms.version>3.2.12</datanucleus-rdbms.version>
- </properties>
-
<dependencies>
<dependency>
<groupId>org.datanucleus</groupId>
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
index 97dbd2a..96e57f1 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.SentryHiveConstants;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -108,8 +109,10 @@ public class SentryGrantRevokeTask extends Task<DDLWork> implements Serializable
private HiveOperation stmtOperation;
@Override
- public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
- super.initialize(conf, queryPlan, driverContext);
+ public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx,
+ CompilationOpContext opContext) {
+ // CompilationOpContext is an unused parameter on the initialize() method.
+ super.initialize(conf, queryPlan, driverContext, null);
this.conf = conf;
}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
index a3aa0b0..994ae7a 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
@@ -18,18 +18,10 @@ package org.apache.sentry.binding.hive;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessController;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerImpl;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.session.HiveSessionHookContext;
+import org.apache.sentry.binding.hive.authz.SentryHiveAuthorizerFactory;
import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
import org.apache.sentry.binding.hive.authz.HiveAuthzBindingHookBase;
import com.google.common.base.Joiner;
@@ -52,7 +44,6 @@ public class HiveAuthzBindingSessionHook
ConfVars.HADOOPBIN.varname,
ConfVars.HIVESESSIONID.varname,
ConfVars.HIVEAUXJARS.varname,
- ConfVars.HIVESTATSDBCONNECTIONSTRING.varname,
ConfVars.SCRATCHDIRPERMISSION.varname,
ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.varname,
ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname,
@@ -65,29 +56,6 @@ public class HiveAuthzBindingSessionHook
HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET
);
- public static class SentryHiveAuthorizerFactory implements
- HiveAuthorizerFactory {
-
- @Override
- public HiveAuthorizer createHiveAuthorizer(
- HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf,
- HiveAuthenticationProvider hiveAuthenticator,
- HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
- return new SentryHiveAuthorizerImpl(null, null); }
- }
-
- public static class SentryHiveAuthorizerImpl extends HiveAuthorizerImpl {
-
- public SentryHiveAuthorizerImpl(HiveAccessController accessController,
- HiveAuthorizationValidator authValidator) {
- super(accessController, authValidator);
- }
-
- @Override
- public void applyAuthorizationConfigPolicy(HiveConf conf) {
- }
- }
-
/**
* The session hook for sentry authorization that sets the required session level configuration
* 1. Setup the sentry hooks -
@@ -115,8 +83,7 @@ public class HiveAuthzBindingSessionHook
// set user name
sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());
- sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
- "org.apache.sentry.binding.hive.HiveAuthzBindingSessionHook$SentryHiveAuthorizerFactory");
+ sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, SentryHiveAuthorizerFactory.class.getName());
// Set MR ACLs to session user
appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB,
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java
index 9f3d42d..2a215c4 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/HiveAuthzPrivilegesMap.java
@@ -233,7 +233,6 @@ public class HiveAuthzPrivilegesMap {
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_CLUSTER_SORT, alterTablePrivilege);
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_FILEFORMAT, alterTablePrivilege);
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_TOUCH, alterTablePrivilege);
- hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_PROTECTMODE, alterTablePrivilege);
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_RENAMECOL, alterTablePrivilege);
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_ADDCOLS, alterTablePrivilege);
@@ -246,7 +245,6 @@ public class HiveAuthzPrivilegesMap {
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_ARCHIVE, alterTablePrivilege);
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_UNARCHIVE, alterTablePrivilege);
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_FILEFORMAT, alterTablePrivilege);
- hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_PROTECTMODE, alterTablePrivilege);
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERPARTITION_SERDEPROPERTIES, alterTablePrivilege);
hiveAuthzStmtPrivMap.put(HiveOperation.ALTERTABLE_MERGEFILES, alterTablePrivilege);
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryHiveAuthorizerFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryHiveAuthorizerFactory.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryHiveAuthorizerFactory.java
new file mode 100644
index 0000000..f6297e9
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryHiveAuthorizerFactory.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.authz;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+
+/**
+ * Factory class that creates a HiveAuthorizer implementation for the Hive authorization V2
+ * API.
+ * <p>
+ * In order to use this class, the hive-site.xml should be configured in the following way:
+ * <p>
+ * <property>
+ *   <name>hive.security.authorization.manager</name>
+ *   <value>org.apache.sentry.binding.hive.authz.SentryHiveAuthorizerFactory</value>
+ * </property>
+ */
+public class SentryHiveAuthorizerFactory implements HiveAuthorizerFactory {
+ @Override
+ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+ HiveConf conf, HiveAuthenticationProvider hiveAuthenticator, HiveAuthzSessionContext ctx)
+ throws HiveAuthzPluginException {
+ return new SentryHiveAuthorizerImpl();
+ }
+}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryHiveAuthorizerImpl.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryHiveAuthorizerImpl.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryHiveAuthorizerImpl.java
new file mode 100644
index 0000000..9c72876
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/authz/SentryHiveAuthorizerImpl.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.authz;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.AbstractHiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.sentry.binding.metastore.SentryMetaStoreFilterHook;
+
+/**
+ * This is a HiveAuthorizer implementation, and it is used by HiveServer2 to check privileges
+ * of an object, execute GRANT/REVOKE DDL statements and filter HMS metadata. This class is
+ * part of the Hive authorization V2.
+ * <p>
+ * NOTE: For this first version of the class, only the HMS metadata filtering is implemented.
+ * The rest of the authorization is still using Hive authorization V1 API.
+ */
+public class SentryHiveAuthorizerImpl extends AbstractHiveAuthorizer {
+ private SentryMetaStoreFilterHook filterHook;
+
+ public SentryHiveAuthorizerImpl() {
+ filterHook = new SentryMetaStoreFilterHook(null);
+ }
+
+ @Override
+ public VERSION getVersion() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void grantPrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void revokePrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void createRole(String roleName, HivePrincipal adminGrantor)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void dropRole(String roleName)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles, boolean grantOption,
+ HivePrincipal grantorPrinc) throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputsHObjs,
+ List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+ HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
+ if (listObjs == null || listObjs.size() == 0) {
+ return listObjs;
+ }
+
+ switch (listObjs.get(0).getType()) {
+ case DATABASE:
+ return filterDbs(listObjs);
+ case TABLE_OR_VIEW:
+ return filterTables(listObjs);
+ default:
+ return listObjs;
+ }
+ }
+
+ @Override
+ public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal,
+ HivePrivilegeObject privObj) throws HiveAuthzPluginException, HiveAccessControlException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void setCurrentRole(String roleName)
+ throws HiveAccessControlException, HiveAuthzPluginException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
+
+ }
+
+ private List<HivePrivilegeObject> filterDbs(List<HivePrivilegeObject> listObjs) {
+ List<String> dbList = new ArrayList<>(listObjs.size());
+ for (HivePrivilegeObject o : listObjs) {
+ dbList.add(o.getDbname());
+ }
+
+ List<String> filterDbList = filterHook.filterDatabases(dbList);
+ List<HivePrivilegeObject> filterObjs = new ArrayList<>(filterDbList.size());
+ for (String db : filterDbList) {
+ filterObjs.add(new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, db, db));
+ }
+
+ return filterObjs;
+ }
+
+ private List<HivePrivilegeObject> filterTables(List<HivePrivilegeObject> listObjs) {
+ if (listObjs == null || listObjs.size() == 0) {
+ return listObjs;
+ }
+
+ List<String> tableList = new ArrayList<>(listObjs.size());
+ for (HivePrivilegeObject o : listObjs) {
+ tableList.add(o.getObjectName());
+ }
+
+ String db = listObjs.get(0).getDbname();
+
+ List<String> filterTableList =
+ filterHook.filterTableNames(db, tableList);
+
+ List<HivePrivilegeObject> filterObjs = new ArrayList<>(filterTableList.size());
+ for (String table : filterTableList) {
+ filterObjs.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, db, table));
+ }
+
+ return filterObjs;
+ }
+}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
index d20da81..92eb136 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStore.java
@@ -223,7 +223,7 @@ public class AuthorizingObjectStore extends ObjectStore {
@Override
public List<Partition> getPartitionsWithAuth(String dbName, String tblName,
short maxParts, String userName, List<String> groupNames)
- throws MetaException, NoSuchObjectException, InvalidObjectException {
+ throws MetaException, InvalidObjectException {
if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
index 9e066e1..d015085 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/AuthorizingObjectStoreBase.java
@@ -221,7 +221,7 @@ public class AuthorizingObjectStoreBase extends ObjectStore {
@Override
public List<Partition> getPartitionsWithAuth(String dbName, String tblName,
short maxParts, String userName, List<String> groupNames)
- throws MetaException, NoSuchObjectException, InvalidObjectException {
+ throws MetaException, InvalidObjectException {
if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java
index 6df4885..0909656 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/MetastoreAuthzBindingBase.java
@@ -19,6 +19,7 @@ package org.apache.sentry.binding.metastore;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
+import java.util.Iterator;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -335,14 +336,21 @@ public abstract class MetastoreAuthzBindingBase extends MetaStorePreEventListene
protected void authorizeDropPartition(PreDropPartitionEvent context)
throws InvalidOperationException, MetaException {
+ Iterator<Partition> partitionIterator = context.getPartitionIterator();
+ HierarcyBuilder inputHierarchy = new HierarcyBuilder();
+ HierarcyBuilder outputHierarchy = new HierarcyBuilder();
+
+ Partition partition;
+ while(partitionIterator.hasNext()) {
+ partition = partitionIterator.next();
+ inputHierarchy.addTableToOutput(getAuthServer(), partition.getDbName(),
+ partition.getTableName());
+ outputHierarchy.addTableToOutput(getAuthServer(), partition.getDbName(),
+ partition.getTableName());
+ }
+
authorizeMetastoreAccess(
- HiveOperation.ALTERTABLE_DROPPARTS,
- new HierarcyBuilder().addTableToOutput(getAuthServer(),
- context.getPartition().getDbName(),
- context.getPartition().getTableName()).build(),
- new HierarcyBuilder().addTableToOutput(getAuthServer(),
- context.getPartition().getDbName(),
- context.getPartition().getTableName()).build());
+ HiveOperation.ALTERTABLE_DROPPARTS, inputHierarchy.build(), outputHierarchy.build());
}
private void authorizeAlterPartition(PreAlterPartitionEvent context)
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
index 3ec2eed..11b6b4a 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
@@ -19,6 +19,7 @@ package org.apache.sentry.binding.metastore;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
@@ -284,7 +285,9 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
return;
}
- for (Partition part : partitionEvent.getPartitions()) {
+ Iterator<Partition> partitionIterator = partitionEvent.getPartitionIterator();
+ while (partitionIterator.hasNext()) {
+ Partition part = partitionIterator.next();
if (part.getSd() != null && part.getSd().getLocation() != null) {
String authzObj = part.getDbName() + "." + part.getTableName();
String path = part.getSd().getLocation();
@@ -309,11 +312,15 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
String authzObj = partitionEvent.getTable().getDbName() + "."
+ partitionEvent.getTable().getTableName();
- String path = partitionEvent.getPartition().getSd().getLocation();
- for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
- plugin.removePath(authzObj, path);
+ Iterator<Partition> partitionIterator = partitionEvent.getPartitionIterator();
+ while (partitionIterator.hasNext()) {
+ Partition part = partitionIterator.next();
+ String path = part.getSd().getLocation();
+ for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+ plugin.removePath(authzObj, path);
+ }
}
super.onDropPartition(partitionEvent);
}
private SentryPolicyServiceClient getSentryServiceClient()
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java
index 5b9274e..40cf17a 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerBase.java
@@ -17,6 +17,7 @@
*/
package org.apache.sentry.binding.metastore;
+import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
@@ -276,7 +277,9 @@ public class SentryMetastorePostEventListenerBase extends MetaStoreEventListener
return;
}
- for (Partition part : partitionEvent.getPartitions()) {
+ Iterator<Partition> partitionIterator = partitionEvent.getPartitionIterator();
+ while (partitionIterator.hasNext()) {
+ Partition part = partitionIterator.next();
if (part.getSd() != null && part.getSd().getLocation() != null) {
String authzObj = part.getDbName() + "." + part.getTableName();
String path = part.getSd().getLocation();
@@ -301,11 +304,16 @@ public class SentryMetastorePostEventListenerBase extends MetaStoreEventListener
String authzObj = partitionEvent.getTable().getDbName() + "."
+ partitionEvent.getTable().getTableName();
- String path = partitionEvent.getPartition().getSd().getLocation();
- for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
- plugin.removePath(authzObj, path);
+
+ Iterator<Partition> partitionIterator = partitionEvent.getPartitionIterator();
+ while (partitionIterator.hasNext()) {
+ Partition part = partitionIterator.next();
+ String path = part.getSd().getLocation();
+ for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+ plugin.removePath(authzObj, path);
+ }
}
super.onDropPartition(partitionEvent);
}
private SentryPolicyServiceClient getSentryServiceClient()
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerNotificationLog.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerNotificationLog.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerNotificationLog.java
index 58470d6..9050231 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerNotificationLog.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListenerNotificationLog.java
@@ -275,7 +275,8 @@ public class SentryMetastorePostEventListenerNotificationLog extends MetaStoreEv
// Any more?
NotificationEvent event = new NotificationEvent(0L, now(), HCatConstants.HCAT_ALTER_PARTITION_EVENT,
- messageFactory.buildAlterPartitionMessage(partitionEvent.getOldPartition(), partitionEvent.getNewPartition()).toString());
+ messageFactory.buildAlterPartitionMessage(partitionEvent.getTable(),
+ partitionEvent.getOldPartition(), partitionEvent.getNewPartition()).toString());
event.setDbName(partitionEvent.getNewPartition().getDbName());
event.setTableName(partitionEvent.getNewPartition().getTableName());
@@ -304,7 +305,7 @@ public class SentryMetastorePostEventListenerNotificationLog extends MetaStoreEv
//TODO: Need more validations?
NotificationEvent event = new NotificationEvent(0L, now(), HCatConstants.HCAT_ADD_PARTITION_EVENT,
- messageFactory.buildAddPartitionMessage(partitionEvent.getTable(), partitionEvent.getPartitions()).toString());
+ messageFactory.buildAddPartitionMessage(partitionEvent.getTable(), partitionEvent.getPartitionIterator()).toString());
event.setDbName(partitionEvent.getTable().getDbName());
event.setTableName(partitionEvent.getTable().getTableName());
@@ -322,7 +323,7 @@ public class SentryMetastorePostEventListenerNotificationLog extends MetaStoreEv
}
NotificationEvent event = new NotificationEvent(0L, now(), HCatConstants.HCAT_DROP_PARTITION_EVENT,
- messageFactory.buildDropPartitionMessage(partitionEvent.getTable(), partitionEvent.getPartition()).toString());
+ messageFactory.buildDropPartitionMessage(partitionEvent.getTable(), partitionEvent.getPartitionIterator()).toString());
//TODO: Why is this asymmetric with add partitions(s)?
// Seems like adding multiple partitions generate a single event
// where as single partition drop generated an event?
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java
index aed218e..de073ed 100644
--- a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java
+++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestSentryHiveAuthorizationTaskFactory.java
@@ -83,6 +83,7 @@ public class TestSentryHiveAuthorizationTaskFactory {
@Before
public void setup() throws Exception {
conf = new HiveConf();
+ conf.set("datanucleus.schema.autoCreateTables", "true");
baseDir = Files.createTempDir();
baseDir.setWritable(true, false);
conf.setVar(HiveConf.ConfVars.SCRATCHDIR, baseDir.getAbsolutePath());
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java
index aa3de64..a3d0d0e6 100644
--- a/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java
+++ b/sentry-binding/sentry-binding-hive/src/test/java/org/apache/sentry/binding/hive/TestURI.java
@@ -40,6 +40,7 @@ public class TestURI {
@BeforeClass
public static void setupTestURI() {
conf = new HiveConf();
+ conf.set("datanucleus.schema.autoCreateTables", "true");
baseDir = Files.createTempDir();
baseDir.setWritable(true, false);
conf.setVar(HiveConf.ConfVars.SCRATCHDIR, baseDir.getAbsolutePath());
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-binding/sentry-binding-hive/src/test/resources/log4j2.properties
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/test/resources/log4j2.properties b/sentry-binding/sentry-binding-hive/src/test/resources/log4j2.properties
new file mode 100644
index 0000000..9fd9722
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive/src/test/resources/log4j2.properties
@@ -0,0 +1,50 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+
+# list of properties
+property.sentry.root.logger = DEBUG
+
+# list of all appenders
+appenders = console
+
+# console appender
+appender.console.name = console
+appender.console.type = Console
+appender.console.target = SYSTEM_ERR
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d (%t) [%p - %l] %m%n
+
+# list of all loggers
+loggers = Sentry, HadoopConf
+
+logger.Sentry.name = org.apache.sentry
+logger.Sentry.level = DEBUG
+
+logger.HadoopConf.name = org.apache.hadoop.conf.Configuration
+logger.HadoopConf.level = INFO
+
+# root logger
+rootLogger.level = ${sys:sentry.root.logger}
+rootLogger.appenderRefs = console
+rootLogger.appenderRef.console.ref = console
+rootLogger.appenderRef.console.level = ${sys:sentry.root.logger}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-provider/sentry-provider-db/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/pom.xml b/sentry-provider/sentry-provider-db/pom.xml
index 6b7d3c0..882a3ce 100644
--- a/sentry-provider/sentry-provider-db/pom.xml
+++ b/sentry-provider/sentry-provider-db/pom.xml
@@ -79,6 +79,26 @@ limitations under the License.
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-core</artifactId>
+ <version>${datanucleus-core.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-api-jdo</artifactId>
+ <version>${datanucleus-api-jdo.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-rdbms</artifactId>
+ <version>${datanucleus-rdbms.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>javax.jdo</artifactId>
+ <version>${datanucleus-jdo.version}</version>
+ </dependency>
+ <dependency>
<groupId>org.apache.sentry</groupId>
<artifactId>sentry-core-common</artifactId>
</dependency>
@@ -154,11 +174,6 @@ limitations under the License.
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>javax.jdo</artifactId>
- <version>3.2.0-m3</version>
- </dependency>
- <dependency>
<groupId>com.codahale.metrics</groupId>
<artifactId>metrics-core</artifactId>
</dependency>
@@ -284,6 +299,7 @@ limitations under the License.
<plugin>
<groupId>org.datanucleus</groupId>
<artifactId>datanucleus-maven-plugin</artifactId>
+ <version>${datanucleus.maven.plugin.version}</version>
<configuration>
<api>JDO</api>
<metadataIncludes>**/*.jdo</metadataIncludes>
@@ -297,6 +313,13 @@ limitations under the License.
</goals>
</execution>
</executions>
+ <dependencies>
+ <dependency>
+ <groupId>org.datanucleus</groupId>
+ <artifactId>datanucleus-core</artifactId>
+ <version>${datanucleus-core.version}</version>
+ </dependency>
+ </dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -346,73 +369,6 @@ limitations under the License.
</dependencies>
</profile>
<profile>
- <id>datanucleus3</id>
- <activation>
- <activeByDefault>true</activeByDefault>
- </activation>
- <properties>
- <datanucleus-api-jdo.version>3.2.6</datanucleus-api-jdo.version>
- <datanucleus-core.version>3.2.12</datanucleus-core.version>
- <datanucleus-rdbms.version>3.2.12</datanucleus-rdbms.version>
- </properties>
- <dependencies>
- <dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>datanucleus-core</artifactId>
- <version>${datanucleus-core.version}</version>
- </dependency>
- <dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>datanucleus-api-jdo</artifactId>
- <version>${datanucleus-api-jdo.version}</version>
- </dependency>
- <dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>datanucleus-rdbms</artifactId>
- <version>${datanucleus-rdbms.version}</version>
- </dependency>
- <dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>javax.jdo</artifactId>
- <version>3.2.0-m3</version>
- </dependency>
- </dependencies>
- </profile>
- <profile>
- <id>datanucleus4</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <properties>
- <datanucleus-api-jdo.version>4.2.1</datanucleus-api-jdo.version>
- <datanucleus-core.version>4.1.6</datanucleus-core.version>
- <datanucleus-rdbms.version>4.1.7</datanucleus-rdbms.version>
- <datanucleus-jdo.version>3.2.0-m3</datanucleus-jdo.version>
- </properties>
- <dependencies>
- <dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>datanucleus-core</artifactId>
- <version>${datanucleus-core.version}</version>
- </dependency>
- <dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>datanucleus-api-jdo</artifactId>
- <version>${datanucleus-api-jdo.version}</version>
- </dependency>
- <dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>datanucleus-rdbms</artifactId>
- <version>${datanucleus-rdbms.version}</version>
- </dependency>
- <dependency>
- <groupId>org.datanucleus</groupId>
- <artifactId>javax.jdo</artifactId>
- <version>3.2.0-m3</version>
- </dependency>
- </dependencies>
- </profile>
- <profile>
<id>thriftif</id>
<build>
<plugins>
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
index cd85400..71eb9c1 100644
--- a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
+++ b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
@@ -59,8 +59,7 @@ import org.apache.sentry.service.thrift.ServiceConstants.ThriftConstants;
import org.apache.sentry.service.thrift.Status;
import org.apache.sentry.service.thrift.TSentryResponseStatus;
import org.apache.thrift.TException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.apache.log4j.Logger;
import com.codahale.metrics.Timer;
import static com.codahale.metrics.MetricRegistry.name;
@@ -77,8 +76,8 @@ import static org.apache.sentry.hdfs.Updateable.Update;
@SuppressWarnings("unused")
public class SentryPolicyStoreProcessor implements SentryPolicyService.Iface {
- private static final Logger LOGGER = LoggerFactory.getLogger(SentryPolicyStoreProcessor.class);
- private static final Logger AUDIT_LOGGER = LoggerFactory.getLogger(Constants.AUDIT_LOGGER_NAME);
+ private static final Logger LOGGER = Logger.getLogger(SentryPolicyStoreProcessor.class);
+ private static final Logger AUDIT_LOGGER = Logger.getLogger(Constants.AUDIT_LOGGER_NAME);
static final String SENTRY_POLICY_SERVICE_NAME = "SentryPolicyService";
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/pom.xml b/sentry-tests/sentry-tests-hive/pom.xml
index 51801dc..75014d4 100644
--- a/sentry-tests/sentry-tests-hive/pom.xml
+++ b/sentry-tests/sentry-tests-hive/pom.xml
@@ -35,6 +35,15 @@ limitations under the License.
<HIVE_CONF_DIR>${env.HIVE_CONF_DIR}</HIVE_CONF_DIR>
</properties>
<dependencies>
+ <!-- This dependency is just added to allow StringUtils.removePrefix() to work because
+ Hive 2.x was bringing a lower version of ant. We should figure out a different
+ API to remove the prefix instead of adding the ant dependency
+ -->
+ <dependency>
+ <groupId>org.apache.ant</groupId>
+ <artifactId>ant</artifactId>
+ <version>1.9.1</version>
+ </dependency>
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
@@ -102,6 +111,10 @@ limitations under the License.
<groupId>org.datanucleus</groupId>
<artifactId>datanucleus-rdbms</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-1.2-api</artifactId>
+ </exclusion>
</exclusions>
<scope>test</scope>
</dependency>
@@ -116,6 +129,12 @@ limitations under the License.
<artifactId>hive-common</artifactId>
<version>${hive.version}</version>
<scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.logging.log4j</groupId>
+ <artifactId>log4j-1.2-api</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbDDLAuditLog.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbDDLAuditLog.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbDDLAuditLog.java
index e105f00..1619f7f 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbDDLAuditLog.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbDDLAuditLog.java
@@ -44,8 +44,8 @@ public class TestDbDDLAuditLog extends AbstractTestWithStaticConfiguration {
public static void setupTestStaticConfiguration() throws Exception {
useSentryService = true;
AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
- Logger logger = Logger.getLogger("sentry.hive.authorization.ddl.logger");
AuditLoggerTestAppender testAppender = new AuditLoggerTestAppender();
+ Logger logger = Logger.getLogger(Constants.AUDIT_LOGGER_NAME);
logger.addAppender(testAppender);
logger.setLevel(Level.INFO);
}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
index 2073d85..95bbaeb 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
@@ -268,7 +268,7 @@ public class TestHDFSIntegrationAdvanced extends TestHDFSIntegrationBase {
stmt = conn.createStatement();
stmt.execute("create database " + dbName);
LOGGER.info("create external table in " + tmpHDFSPartitionStr);
- stmt.execute("create external table tab1(a int) partitioned by (date string) location 'hdfs://" + tmpHDFSPartitionStr + "'");
+ stmt.execute("create external table tab1(a int) partitioned by (date1 string) location 'hdfs://" + tmpHDFSPartitionStr + "'");
miniDFS.getFileSystem().setOwner(tmpHDFSDir, "hdfs", "hdfs");
miniDFS.getFileSystem().setPermission(tmpHDFSDir, FsPermission.valueOf("drwxrwx---"));
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
index 718b02c..27cfba9 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
@@ -35,6 +35,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import com.google.common.base.Preconditions;
@@ -63,6 +65,7 @@ import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.sentry.binding.hive.SentryHiveAuthorizationTaskFactoryImpl;
+import org.apache.sentry.binding.hive.authz.SentryHiveAuthorizerFactory;
import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
import org.apache.sentry.hdfs.SentryHDFSServiceClientFactory;
import org.apache.sentry.hdfs.SentryINodeAttributesProvider;
@@ -559,6 +562,13 @@ public abstract class TestHDFSIntegrationBase {
hiveConf.set("datanucleus.autoCreateSchema", "true");
hiveConf.set("datanucleus.fixedDatastore", "false");
hiveConf.set("datanucleus.autoStartMechanism", "SchemaTable");
+ hiveConf.set("datanucleus.schema.autoCreateTables", "true");
+
+ hiveConf.set(ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, "false");
+ hiveConf.set(ConfVars.HIVE_AUTHORIZATION_MANAGER.varname, SentryHiveAuthorizerFactory.class.getName());
+ hiveConf.set(ConfVars.HIVE_CBO_ENABLED.varname, "false");
+ hiveConf.set(ConfVars.METASTORE_DISALLOW_INCOMPATIBLE_COL_TYPE_CHANGES.varname, "false");
+ hiveConf.set(ConfVars.HIVE_IN_TEST.varname, "true");
// Sets hive.metastore.authorization.storage.checks to true, so that
// disallow the operations such as drop-partition if the user in question
@@ -612,12 +622,15 @@ public abstract class TestHDFSIntegrationBase {
hiveUgi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
+ final CountDownLatch hmsStartedSignal = new CountDownLatch(1);
+
metastore = new InternalMetastoreServer(hiveConf);
new Thread() {
@Override
public void run() {
try {
metastore.start();
+ hmsStartedSignal.countDown();
while (true) {
Thread.sleep(1000L);
}
@@ -627,6 +640,7 @@ public abstract class TestHDFSIntegrationBase {
}
}.start();
+ hmsStartedSignal.await(30, TimeUnit.SECONDS);
hmsClient = new HiveMetaStoreClient(hiveConf);
startHiveServer2(retries, hiveConf);
return null;
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
index d4bc97d..645fc35 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
@@ -219,7 +219,10 @@ public class TestHDFSIntegrationEnd2End extends TestHDFSIntegrationBase {
stmt.execute("grant all on table exT100 to role tab_role");
verifyOnAllSubDirs("/tmp/external/ext100", FsAction.ALL, "flume", true);
+ stmt.execute("drop table ext100");
+ stmt.execute("drop table ext101");
stmt.execute("use default");
+ stmt.execute("drop database extdb");
//TODO: SENTRY-795: HDFS permissions do not sync when Sentry restarts in HA mode.
if(!testSentryHA) {
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
index 0a39f59..5fb4659 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/AbstractTestWithStaticConfiguration.java
@@ -462,6 +462,7 @@ public abstract class AbstractTestWithStaticConfiguration extends RulesForE2ETes
hiveConf.set("hive.metastore.authorization.storage.checks", "true");
hiveConf.set("hive.metastore.uris", "thrift://localhost:" + hmsPort);
hiveConf.set("sentry.metastore.service.users", "hive");// queries made by hive user (beeline) skip meta store check
+ hiveConf.set("datanucleus.schema.autoCreateTables", "true");
File confDir = assertCreateDir(new File(baseDir, "etc"));
File hiveSite = new File(confDir, "hive-site.xml");
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java
index fb0ef19..3c23dc4 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestMetadataObjectRetrieval.java
@@ -435,7 +435,7 @@ public class TestMetadataObjectRetrieval extends AbstractTestWithStaticConfigura
assertEquals(index, rs.getString(1).trim());
assertEquals(table, rs.getString(2).trim());
assertEquals("value", rs.getString(3).trim());
- assertEquals(dbName + "." + dbName + "__" + table + "_" + index + "__",
+ assertEquals(dbName + "__" + table + "_" + index + "__",
rs.getString(4).trim());
assertEquals("compact", rs.getString(5).trim());
}
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java
index 3a4da50..1e72990 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java
@@ -543,6 +543,7 @@ public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
statement = context.createStatement(connection);
statement.execute("Use " + DB1);
statement.execute("ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
+ statement.execute("ALTER TABLE tb1 ADD PARTITION (b = '1')");
policyFile.addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
.addRolesToGroup(USERGROUP1, "alter_db1_tb1")
@@ -555,7 +556,7 @@ public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
statement = context.createStatement(connection);
statement.execute("Use " + DB1);
statement.execute("ALTER TABLE tb1 INTO 6 BUCKETS");
- statement.execute("ALTER TABLE tb1 PARTITION (a = '1') INTO 6 BUCKETS");
+ statement.execute("ALTER TABLE tb1 PARTITION (b = '1') INTO 6 BUCKETS");
statement.close();
connection.close();
@@ -566,7 +567,7 @@ public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
statement.execute("Use " + DB1);
context.assertSentrySemanticException(statement, "ALTER TABLE tb1 INTO 6 BUCKETS",
semanticException);
- context.assertSentrySemanticException(statement, "ALTER TABLE tb1 PARTITION (a = '1') INTO 6 BUCKETS",
+ context.assertSentrySemanticException(statement, "ALTER TABLE tb1 PARTITION (b = '1') INTO 6 BUCKETS",
semanticException);
statement.close();
@@ -689,21 +690,19 @@ public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
2. HiveOperation.ALTERTABLE_SERDEPROPERTIES
3. HiveOperation.ALTERTABLE_CLUSTER_SORT
4. HiveOperation.ALTERTABLE_TOUCH
- 5. HiveOperation.ALTERTABLE_PROTECTMODE
- 6. HiveOperation.ALTERTABLE_FILEFORMAT
- 7. HiveOperation.ALTERTABLE_RENAMEPART
- 8. HiveOperation.ALTERPARTITION_SERDEPROPERTIES
- 9. TODO: archive partition
- 10. TODO: unarchive partition
- 11. HiveOperation.ALTERPARTITION_FILEFORMAT
- 12. TODO: partition touch (is it same as HiveOperation.ALTERTABLE_TOUCH?)
- 13. HiveOperation.ALTERPARTITION_PROTECTMODE
- 14. HiveOperation.ALTERTABLE_RENAMECOL
- 15. HiveOperation.ALTERTABLE_ADDCOLS
- 16. HiveOperation.ALTERTABLE_REPLACECOLS
- 17. TODO: HiveOperation.ALTERVIEW_PROPERTIES
- 18. TODO: HiveOperation.ALTERTABLE_SERIALIZER
- 19. TODO: HiveOperation.ALTERPARTITION_SERIALIZER
+ 5. HiveOperation.ALTERTABLE_FILEFORMAT
+ 6. HiveOperation.ALTERTABLE_RENAMEPART
+ 7. HiveOperation.ALTERPARTITION_SERDEPROPERTIES
+ 8. TODO: archive partition
+ 9. TODO: unarchive partition
+ 10. HiveOperation.ALTERPARTITION_FILEFORMAT
+ 11. TODO: partition touch (is it same as HiveOperation.ALTERTABLE_TOUCH?)
+ 12. HiveOperation.ALTERTABLE_RENAMECOL
+ 13. HiveOperation.ALTERTABLE_ADDCOLS
+ 14. HiveOperation.ALTERTABLE_REPLACECOLS
+ 15. TODO: HiveOperation.ALTERVIEW_PROPERTIES
+ 16. TODO: HiveOperation.ALTERTABLE_SERIALIZER
+ 17. TODO: HiveOperation.ALTERPARTITION_SERIALIZER
*/
@Test
public void testAlterTable() throws Exception {
@@ -736,8 +735,6 @@ public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
assertSemanticException(statement, "ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
assertSemanticException(statement, "ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
assertSemanticException(statement, "ALTER TABLE tb1 TOUCH");
- assertSemanticException(statement, "ALTER TABLE tb1 ENABLE NO_DROP");
- assertSemanticException(statement, "ALTER TABLE tb1 DISABLE OFFLINE");
assertSemanticException(statement, "ALTER TABLE tb1 SET FILEFORMAT RCFILE");
assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) RENAME TO PARTITION (b = 2)");
@@ -746,8 +743,6 @@ public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
//assertSemanticException(statement, "ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET FILEFORMAT RCFILE");
assertSemanticException(statement, "ALTER TABLE tb1 TOUCH PARTITION (b = 10)");
- assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) DISABLE NO_DROP");
- assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) DISABLE OFFLINE");
assertSemanticException(statement, "ALTER TABLE tb1 CHANGE COLUMN a c int");
assertSemanticException(statement, "ALTER TABLE tb1 ADD COLUMNS (a int)");
@@ -768,8 +763,6 @@ public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
statement.execute("ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
statement.execute("ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
statement.execute("ALTER TABLE tb1 TOUCH");
- statement.execute("ALTER TABLE tb1 ENABLE NO_DROP");
- statement.execute("ALTER TABLE tb1 DISABLE OFFLINE");
statement.execute("ALTER TABLE tb1 SET FILEFORMAT RCFILE");
statement.execute("ALTER TABLE tb1 PARTITION (b = 1) RENAME TO PARTITION (b = 2)");
@@ -778,8 +771,6 @@ public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
//statement.execute("ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET FILEFORMAT RCFILE");
statement.execute("ALTER TABLE tb1 TOUCH PARTITION (b = 2)");
- statement.execute("ALTER TABLE tb1 PARTITION (b = 2) DISABLE NO_DROP");
- statement.execute("ALTER TABLE tb1 PARTITION (b = 2) DISABLE OFFLINE");
statement.execute("ALTER TABLE tb1 CHANGE COLUMN a c int");
statement.execute("ALTER TABLE tb1 ADD COLUMNS (a int)");
http://git-wip-us.apache.org/repos/asf/sentry/blob/da1863f3/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java
index 249d3bc..bd0f978 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestPrivilegesAtFunctionScope.java
@@ -192,21 +192,6 @@ public class TestPrivilegesAtFunctionScope extends AbstractTestWithStaticConfigu
statement.close();
connection.close();
- connection = context.createConnection(USER3_1);
- statement = context.createStatement(connection);
- statement.execute("USE " + DB1);
-
- // user3 only has db1_tab1 privilege but still should be able execute the temp function.
- try {
- verifyPrintFuncValues(statement, "SELECT printf_test('%s', value) FROM " + tableName1);
- } catch (Exception ex) {
- LOGGER.error("test temp func printf_test failed with reason: ", ex);
- fail("fail to test temp func printf_test");
- }
-
- statement.close();
- connection.close();
-
connection = context.createConnection(USER1_1);
statement = context.createStatement(connection);
statement.execute("USE " + DB1);
@@ -225,21 +210,6 @@ public class TestPrivilegesAtFunctionScope extends AbstractTestWithStaticConfigu
statement.close();
connection.close();
- connection = context.createConnection(USER3_1);
- statement = context.createStatement(connection);
- statement.execute("USE " + DB1);
-
- // user3 only has db1_tab1 privilege but still should be able execute the perm function.
- try {
- verifyPrintFuncValues(statement, "SELECT printf_test_perm('%s', value) FROM " + tableName1);
- } catch (Exception ex) {
- LOGGER.error("test perm func printf_test_perm failed with reason: ", ex);
- fail("Fail to test perm func printf_test_perm");
- }
-
- statement.close();
- connection.close();
-
connection = context.createConnection(USER1_1);
statement = context.createStatement(connection);
statement.execute("USE " + DB1);