You are viewing a plain text version of this content; the canonical HTML version is available in the original mailing-list archive.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2016/06/18 23:20:07 UTC
[2/2] hadoop git commit: HDFS-10437. ReconfigurationProtocol not
covered by HDFSPolicyProvider. (Arpit Agarwal)
HDFS-10437. ReconfigurationProtocol not covered by HDFSPolicyProvider. (Arpit Agarwal)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bd33cc01
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bd33cc01
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bd33cc01
Branch: refs/heads/branch-2
Commit: bd33cc017946f93c99a97e02a19770e2f54e3f63
Parents: b726b5c
Author: Arpit Agarwal <ar...@apache.org>
Authored: Sat Jun 18 16:16:54 2016 -0700
Committer: Arpit Agarwal <ar...@apache.org>
Committed: Sat Jun 18 16:17:10 2016 -0700
----------------------------------------------------------------------
.../hadoop/fs/CommonConfigurationKeys.java | 3 ++
.../apache/hadoop/hdfs/HDFSPolicyProvider.java | 6 +++-
.../hadoop/hdfs/TestHDFSPolicyProvider.java | 31 +++++++++-----------
3 files changed, 22 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bd33cc01/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
index 7f510bd..660b4f2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
@@ -179,6 +179,9 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
public static final String
HADOOP_SECURITY_SERVICE_AUTHORIZATION_DATANODE_LIFELINE =
"security.datanode.lifeline.protocol.acl";
+ public static final String
+ HADOOP_SECURITY_SERVICE_AUTHORIZATION_RECONFIGURATION =
+ "security.reconfiguration.protocol.acl";
public static final String
SECURITY_HA_SERVICE_PROTOCOL_ACL = "security.ha.service.protocol.acl";
public static final String
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bd33cc01/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java
index 8c20553..edc72c5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HDFSPolicyProvider.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.ha.ZKFCProtocol;
import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.ReconfigurationProtocol;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocol;
import org.apache.hadoop.hdfs.server.protocol.DatanodeLifelineProtocol;
import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
@@ -80,7 +81,10 @@ public class HDFSPolicyProvider extends PolicyProvider {
TraceAdminProtocol.class),
new Service(
CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DATANODE_LIFELINE,
- DatanodeLifelineProtocol.class)
+ DatanodeLifelineProtocol.class),
+ new Service(
+ CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_RECONFIGURATION,
+ ReconfigurationProtocol.class)
};
@Override
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bd33cc01/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSPolicyProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSPolicyProvider.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSPolicyProvider.java
index 95aa89f..e242690 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSPolicyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSPolicyProvider.java
@@ -23,10 +23,12 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
+import com.google.common.collect.Sets;
import org.apache.commons.lang.ClassUtils;
-import org.apache.hadoop.hdfs.protocol.ReconfigurationProtocol;
import org.apache.hadoop.hdfs.qjournal.server.JournalNodeRpcServer;
import org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
@@ -57,7 +59,7 @@ public class TestHDFSPolicyProvider {
private static final Logger LOG =
LoggerFactory.getLogger(TestHDFSPolicyProvider.class);
- private static List<Class<?>> policyProviderProtocols;
+ private static Set<Class<?>> policyProviderProtocols;
private static final Comparator<Class<?>> CLASS_NAME_COMPARATOR =
new Comparator<Class<?>>() {
@@ -75,11 +77,10 @@ public class TestHDFSPolicyProvider {
@BeforeClass
public static void initialize() {
Service[] services = new HDFSPolicyProvider().getServices();
- policyProviderProtocols = new ArrayList<>(services.length);
+ policyProviderProtocols = new HashSet<>(services.length);
for (Service service : services) {
policyProviderProtocols.add(service.getProtocol());
}
- Collections.sort(policyProviderProtocols, CLASS_NAME_COMPARATOR);
}
public TestHDFSPolicyProvider(Class<?> rpcServerClass) {
@@ -98,29 +99,25 @@ public class TestHDFSPolicyProvider {
@Test
public void testPolicyProviderForServer() {
List<?> ifaces = ClassUtils.getAllInterfaces(rpcServerClass);
- List<Class<?>> serverProtocols = new ArrayList<>(ifaces.size());
+ Set<Class<?>> serverProtocols = new HashSet<>(ifaces.size());
for (Object obj : ifaces) {
Class<?> iface = (Class<?>)obj;
- // ReconfigurationProtocol is not covered in HDFSPolicyProvider
- // currently, so we have a special case to skip it. This needs follow-up
- // investigation.
- if (iface.getSimpleName().endsWith("Protocol") &&
- iface != ReconfigurationProtocol.class) {
+ if (iface.getSimpleName().endsWith("Protocol")) {
serverProtocols.add(iface);
}
}
- Collections.sort(serverProtocols, CLASS_NAME_COMPARATOR);
LOG.info("Running test {} for RPC server {}. Found server protocols {} "
+ "and policy provider protocols {}.", testName.getMethodName(),
rpcServerClass.getName(), serverProtocols, policyProviderProtocols);
assertFalse("Expected to find at least one protocol in server.",
serverProtocols.isEmpty());
+ final Set<Class<?>> differenceSet =
+ Sets.difference(serverProtocols, policyProviderProtocols);
assertTrue(
- String.format("Expected all protocols for server %s to be defined in "
- + "%s. Server contains protocols %s. Policy provider contains "
- + "protocols %s.", rpcServerClass.getName(),
- HDFSPolicyProvider.class.getName(), serverProtocols,
- policyProviderProtocols),
- policyProviderProtocols.containsAll(serverProtocols));
+ String.format("Following protocols for server %s are not defined in "
+ + "%s: %s",
+ rpcServerClass.getName(), HDFSPolicyProvider.class.getName(),
+ Arrays.toString(differenceSet.toArray())),
+ differenceSet.isEmpty());
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org