You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by to...@apache.org on 2014/07/23 03:47:33 UTC
svn commit: r1612742 [3/3] - in
/hadoop/common/branches/MR-2841/hadoop-common-project: hadoop-auth/
hadoop-common/ hadoop-common/src/main/bin/ hadoop-common/src/main/java/
hadoop-common/src/main/java/org/apache/hadoop/crypto/key/
hadoop-common/src/main...
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java Wed Jul 23 01:47:28 2014
@@ -111,10 +111,12 @@ public class TestProxyUsers {
groupMappingClassName);
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(NETGROUP_NAMES)));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -135,10 +137,12 @@ public class TestProxyUsers {
public void testProxyUsers() throws Exception {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -168,10 +172,12 @@ public class TestProxyUsers {
public void testProxyUsersWithUserConf() throws Exception {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserUserConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -202,10 +208,12 @@ public class TestProxyUsers {
public void testWildcardGroup() {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME),
"*");
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -236,10 +244,12 @@ public class TestProxyUsers {
public void testWildcardUser() {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserUserConfKey(REAL_USER_NAME),
"*");
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -270,10 +280,12 @@ public class TestProxyUsers {
public void testWildcardIP() {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
"*");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -301,10 +313,12 @@ public class TestProxyUsers {
public void testIPRange() {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME),
"*");
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP_RANGE);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -324,16 +338,19 @@ public class TestProxyUsers {
public void testWithDuplicateProxyGroups() throws Exception {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES,GROUP_NAMES)));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Collection<String> groupsToBeProxied =
ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME));
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME));
assertEquals (1,groupsToBeProxied.size());
}
@@ -342,16 +359,19 @@ public class TestProxyUsers {
public void testWithDuplicateProxyHosts() throws Exception {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider()
+ .getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(PROXY_IP,PROXY_IP)));
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Collection<String> hosts =
ProxyUsers.getDefaultImpersonationProvider().getProxyHosts().get(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME));
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME));
assertEquals (1,hosts.size());
}
@@ -391,26 +411,73 @@ public class TestProxyUsers {
public void testWithProxyGroupsAndUsersWithSpaces() throws Exception {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserUserConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(PROXY_USER_NAME + " ",AUTHORIZED_PROXY_USER_NAME, "ONEMORE")));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Collection<String> groupsToBeProxied =
ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME));
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME));
assertEquals (GROUP_NAMES.length, groupsToBeProxied.size());
}
+ @Test(expected = IllegalArgumentException.class)
+ public void testProxyUsersWithNullPrefix() throws Exception {
+ ProxyUsers.refreshSuperUserGroupsConfiguration(new Configuration(false),
+ null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testProxyUsersWithEmptyPrefix() throws Exception {
+ ProxyUsers.refreshSuperUserGroupsConfiguration(new Configuration(false),
+ "");
+ }
+
+ @Test
+ public void testProxyUsersWithCustomPrefix() throws Exception {
+ Configuration conf = new Configuration(false);
+ conf.set("x." + REAL_USER_NAME + ".users",
+ StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
+ conf.set("x." + REAL_USER_NAME+ ".hosts", PROXY_IP);
+ ProxyUsers.refreshSuperUserGroupsConfiguration(conf, "x");
+
+
+ // First try proxying a user that's allowed
+ UserGroupInformation realUserUgi = UserGroupInformation
+ .createRemoteUser(REAL_USER_NAME);
+ UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+ AUTHORIZED_PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+ // From good IP
+ assertAuthorized(proxyUserUgi, "1.2.3.4");
+ // From bad IP
+ assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+
+ // Now try proxying a user that's not allowed
+ realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
+ proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+ PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+ // From good IP
+ assertNotAuthorized(proxyUserUgi, "1.2.3.4");
+ // From bad IP
+ assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+ }
+
private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
try {
@@ -430,6 +497,11 @@ public class TestProxyUsers {
}
static class TestDummyImpersonationProvider implements ImpersonationProvider {
+
+ @Override
+ public void init(String configurationPrefix) {
+ }
+
/**
* Authorize a user (superuser) to impersonate another user (user1) if the
* superuser belongs to the group "sudo_user1" .
@@ -460,11 +532,13 @@ public class TestProxyUsers {
public static void loadTest(String ipString, int testRange) {
Configuration conf = new Configuration();
conf.set(
- DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
conf.set(
- DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+ DefaultImpersonationProvider.getTestProvider().
+ getProxySuperuserIpConfKey(REAL_USER_NAME),
ipString
);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/conf/kms-acls.xml Wed Jul 23 01:47:28 2014
@@ -79,4 +79,19 @@
</description>
</property>
+ <property>
+ <name>hadoop.kms.acl.GENERATE_EEK</name>
+ <value>*</value>
+ <description>
+ ACL for generateEncryptedKey CryptoExtension operations
+ </description>
+ </property>
+
+ <property>
+ <name>hadoop.kms.acl.DECRYPT_EEK</name>
+ <value>*</value>
+ <description>
+ ACL for decrypt EncryptedKey CryptoExtension operations
+ </description>
+ </property>
</configuration>
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java Wed Jul 23 01:47:28 2014
@@ -20,6 +20,8 @@ package org.apache.hadoop.crypto.key.kms
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.crypto.key.kms.KMSRESTConstants;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
@@ -29,6 +31,7 @@ import org.apache.hadoop.util.StringUtil
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
@@ -39,10 +42,14 @@ import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.SecurityContext;
+
+import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.Principal;
import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@@ -61,8 +68,10 @@ public class KMS {
private static final String GET_CURRENT_KEY = "GET_CURRENT_KEY";
private static final String GET_KEY_VERSIONS = "GET_KEY_VERSIONS";
private static final String GET_METADATA = "GET_METADATA";
+ private static final String GENERATE_EEK = "GENERATE_EEK";
+ private static final String DECRYPT_EEK = "DECRYPT_EEK";
- private KeyProvider provider;
+ private KeyProviderCryptoExtension provider;
public KMS() throws Exception {
provider = KMSWebApp.getKeyProvider();
@@ -289,6 +298,92 @@ public class KMS {
return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
}
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ @GET
+ @Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
+ KMSRESTConstants.EEK_SUB_RESOURCE)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response generateEncryptedKeys(
+ @Context SecurityContext securityContext,
+ @PathParam("name") String name,
+ @QueryParam(KMSRESTConstants.EEK_OP) String edekOp,
+ @DefaultValue("1")
+ @QueryParam(KMSRESTConstants.EEK_NUM_KEYS) int numKeys)
+ throws Exception {
+ Principal user = getPrincipal(securityContext);
+ KMSClientProvider.checkNotEmpty(name, "name");
+ KMSClientProvider.checkNotNull(edekOp, "eekOp");
+
+ Object retJSON;
+ if (edekOp.equals(KMSRESTConstants.EEK_GENERATE)) {
+ assertAccess(KMSACLs.Type.GENERATE_EEK, user, GENERATE_EEK, name);
+
+ List<EncryptedKeyVersion> retEdeks =
+ new LinkedList<EncryptedKeyVersion>();
+ try {
+ for (int i = 0; i < numKeys; i ++) {
+ retEdeks.add(provider.generateEncryptedKey(name));
+ }
+ } catch (Exception e) {
+ throw new IOException(e);
+ }
+ KMSAudit.ok(user, GENERATE_EEK, name, "");
+ retJSON = new ArrayList();
+ for (EncryptedKeyVersion edek : retEdeks) {
+ ((ArrayList)retJSON).add(KMSServerJSONUtils.toJSON(edek));
+ }
+ } else {
+ throw new IllegalArgumentException("Wrong " + KMSRESTConstants.EEK_OP +
+ " value, it must be " + KMSRESTConstants.EEK_GENERATE + " or " +
+ KMSRESTConstants.EEK_DECRYPT);
+ }
+ KMSWebApp.getGenerateEEKCallsMeter().mark();
+ return Response.ok().type(MediaType.APPLICATION_JSON).entity(retJSON)
+ .build();
+ }
+
+ @SuppressWarnings("rawtypes")
+ @POST
+ @Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}/" +
+ KMSRESTConstants.EEK_SUB_RESOURCE)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response decryptEncryptedKey(@Context SecurityContext securityContext,
+ @PathParam("versionName") String versionName,
+ @QueryParam(KMSRESTConstants.EEK_OP) String eekOp,
+ Map jsonPayload)
+ throws Exception {
+ Principal user = getPrincipal(securityContext);
+ KMSClientProvider.checkNotEmpty(versionName, "versionName");
+ KMSClientProvider.checkNotNull(eekOp, "eekOp");
+
+ String keyName = (String) jsonPayload.get(KMSRESTConstants.NAME_FIELD);
+ String ivStr = (String) jsonPayload.get(KMSRESTConstants.IV_FIELD);
+ String encMaterialStr =
+ (String) jsonPayload.get(KMSRESTConstants.MATERIAL_FIELD);
+ Object retJSON;
+ if (eekOp.equals(KMSRESTConstants.EEK_DECRYPT)) {
+ assertAccess(KMSACLs.Type.DECRYPT_EEK, user, DECRYPT_EEK, versionName);
+ KMSClientProvider.checkNotNull(ivStr, KMSRESTConstants.IV_FIELD);
+ byte[] iv = Base64.decodeBase64(ivStr);
+ KMSClientProvider.checkNotNull(encMaterialStr,
+ KMSRESTConstants.MATERIAL_FIELD);
+ byte[] encMaterial = Base64.decodeBase64(encMaterialStr);
+ KeyProvider.KeyVersion retKeyVersion =
+ provider.decryptEncryptedKey(
+ new KMSClientProvider.KMSEncryptedKeyVersion(keyName, versionName,
+ iv, KeyProviderCryptoExtension.EEK, encMaterial));
+ retJSON = KMSServerJSONUtils.toJSON(retKeyVersion);
+ KMSAudit.ok(user, DECRYPT_EEK, versionName, "");
+ } else {
+ throw new IllegalArgumentException("Wrong " + KMSRESTConstants.EEK_OP +
+ " value, it must be " + KMSRESTConstants.EEK_GENERATE + " or " +
+ KMSRESTConstants.EEK_DECRYPT);
+ }
+ KMSWebApp.getDecryptEEKCallsMeter().mark();
+ return Response.ok().type(MediaType.APPLICATION_JSON).entity(retJSON)
+ .build();
+ }
+
@GET
@Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
KMSRESTConstants.VERSIONS_SUB_RESOURCE)
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java Wed Jul 23 01:47:28 2014
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.crypto.key.kms.server;
+import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
@@ -28,20 +29,20 @@ import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* Provides access to the <code>AccessControlList</code>s used by KMS,
* hot-reloading them if the <code>kms-acls.xml</code> file where the ACLs
* are defined has been updated.
*/
+@InterfaceAudience.Private
public class KMSACLs implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(KMSACLs.class);
public enum Type {
- CREATE, DELETE, ROLLOVER, GET, GET_KEYS, GET_METADATA, SET_KEY_MATERIAL;
+ CREATE, DELETE, ROLLOVER, GET, GET_KEYS, GET_METADATA,
+ SET_KEY_MATERIAL, GENERATE_EEK, DECRYPT_EEK;
public String getConfigKey() {
return KMSConfiguration.CONFIG_PREFIX + "acl." + this.toString();
@@ -52,13 +53,11 @@ public class KMSACLs implements Runnable
public static final int RELOADER_SLEEP_MILLIS = 1000;
- Map<Type, AccessControlList> acls;
- private ReadWriteLock lock;
+ private volatile Map<Type, AccessControlList> acls;
private ScheduledExecutorService executorService;
private long lastReload;
KMSACLs(Configuration conf) {
- lock = new ReentrantReadWriteLock();
if (conf == null) {
conf = loadACLs();
}
@@ -70,17 +69,13 @@ public class KMSACLs implements Runnable
}
private void setACLs(Configuration conf) {
- lock.writeLock().lock();
- try {
- acls = new HashMap<Type, AccessControlList>();
- for (Type aclType : Type.values()) {
- String aclStr = conf.get(aclType.getConfigKey(), ACL_DEFAULT);
- acls.put(aclType, new AccessControlList(aclStr));
- LOG.info("'{}' ACL '{}'", aclType, aclStr);
- }
- } finally {
- lock.writeLock().unlock();
+ Map<Type, AccessControlList> tempAcls = new HashMap<Type, AccessControlList>();
+ for (Type aclType : Type.values()) {
+ String aclStr = conf.get(aclType.getConfigKey(), ACL_DEFAULT);
+ tempAcls.put(aclType, new AccessControlList(aclStr));
+ LOG.info("'{}' ACL '{}'", aclType, aclStr);
}
+ acls = tempAcls;
}
@Override
@@ -120,14 +115,7 @@ public class KMSACLs implements Runnable
public boolean hasAccess(Type type, String user) {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
- AccessControlList acl = null;
- lock.readLock().lock();
- try {
- acl = acls.get(type);
- } finally {
- lock.readLock().unlock();
- }
- return acl.isUserAllowed(ugi);
+ return acls.get(type).isUserAllowed(ugi);
}
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java Wed Jul 23 01:47:28 2014
@@ -34,9 +34,21 @@ public class KMSConfiguration {
public static final String CONFIG_PREFIX = "hadoop.kms.";
+ // Property to Enable/Disable Caching
+ public static final String KEY_CACHE_ENABLE = CONFIG_PREFIX +
+ "cache.enable";
+ // Timeout for the Key and Metadata Cache
public static final String KEY_CACHE_TIMEOUT_KEY = CONFIG_PREFIX +
"cache.timeout.ms";
- public static final long KEY_CACHE_TIMEOUT_DEFAULT = 10 * 1000; // 10 secs
+ // Timeout for the Current Key cache
+ public static final String CURR_KEY_CACHE_TIMEOUT_KEY = CONFIG_PREFIX +
+ "current.key.cache.timeout.ms";
+
+ public static final boolean KEY_CACHE_ENABLE_DEFAULT = true;
+ // 10 mins
+ public static final long KEY_CACHE_TIMEOUT_DEFAULT = 10 * 60 * 1000;
+ // 30 secs
+ public static final long CURR_KEY_CACHE_TIMEOUT_DEFAULT = 30 * 1000;
static Configuration getConfiguration(boolean loadHadoopDefaults,
String ... resources) {
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java Wed Jul 23 01:47:28 2014
@@ -17,8 +17,10 @@
*/
package org.apache.hadoop.crypto.key.kms.server;
+import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.crypto.key.kms.KMSRESTConstants;
import java.util.ArrayList;
@@ -39,7 +41,9 @@ public class KMSServerJSONUtils {
keyVersion.getName());
json.put(KMSRESTConstants.VERSION_NAME_FIELD,
keyVersion.getVersionName());
- json.put(KMSRESTConstants.MATERIAL_FIELD, keyVersion.getMaterial());
+ json.put(KMSRESTConstants.MATERIAL_FIELD,
+ Base64.encodeBase64URLSafeString(
+ keyVersion.getMaterial()));
}
return json;
}
@@ -56,6 +60,21 @@ public class KMSServerJSONUtils {
}
@SuppressWarnings("unchecked")
+ public static Map toJSON(EncryptedKeyVersion encryptedKeyVersion) {
+ Map json = new LinkedHashMap();
+ if (encryptedKeyVersion != null) {
+ json.put(KMSRESTConstants.VERSION_NAME_FIELD,
+ encryptedKeyVersion.getEncryptionKeyVersionName());
+ json.put(KMSRESTConstants.IV_FIELD,
+ Base64.encodeBase64URLSafeString(
+ encryptedKeyVersion.getEncryptedKeyIv()));
+ json.put(KMSRESTConstants.ENCRYPTED_KEY_VERSION_FIELD,
+ toJSON(encryptedKeyVersion.getEncryptedKeyVersion()));
+ }
+ return json;
+ }
+
+ @SuppressWarnings("unchecked")
public static Map toJSON(String keyName, KeyProvider.Metadata meta) {
Map json = new LinkedHashMap();
if (meta != null) {
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java Wed Jul 23 01:47:28 2014
@@ -20,9 +20,12 @@ package org.apache.hadoop.crypto.key.kms
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.crypto.key.CachingKeyProvider;
import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderFactory;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.authorize.AccessControlList;
@@ -34,6 +37,7 @@ import org.slf4j.bridge.SLF4JBridgeHandl
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
+
import java.io.File;
import java.net.URL;
import java.util.List;
@@ -54,6 +58,10 @@ public class KMSWebApp implements Servle
"unauthorized.calls.meter";
private static final String UNAUTHENTICATED_CALLS_METER = METRICS_PREFIX +
"unauthenticated.calls.meter";
+ private static final String GENERATE_EEK_METER = METRICS_PREFIX +
+ "generate_eek.calls.meter";
+ private static final String DECRYPT_EEK_METER = METRICS_PREFIX +
+ "decrypt_eek.calls.meter";
private static Logger LOG;
private static MetricRegistry metricRegistry;
@@ -65,8 +73,10 @@ public class KMSWebApp implements Servle
private static Meter keyCallsMeter;
private static Meter unauthorizedCallsMeter;
private static Meter unauthenticatedCallsMeter;
+ private static Meter decryptEEKCallsMeter;
+ private static Meter generateEEKCallsMeter;
private static Meter invalidCallsMeter;
- private static KeyProvider keyProvider;
+ private static KeyProviderCryptoExtension keyProviderCryptoExtension;
static {
SLF4JBridgeHandler.removeHandlersForRootLogger();
@@ -121,6 +131,10 @@ public class KMSWebApp implements Servle
metricRegistry = new MetricRegistry();
jmxReporter = JmxReporter.forRegistry(metricRegistry).build();
jmxReporter.start();
+ generateEEKCallsMeter = metricRegistry.register(GENERATE_EEK_METER,
+ new Meter());
+ decryptEEKCallsMeter = metricRegistry.register(DECRYPT_EEK_METER,
+ new Meter());
adminCallsMeter = metricRegistry.register(ADMIN_CALLS_METER, new Meter());
keyCallsMeter = metricRegistry.register(KEY_CALLS_METER, new Meter());
invalidCallsMeter = metricRegistry.register(INVALID_CALLS_METER,
@@ -149,11 +163,23 @@ public class KMSWebApp implements Servle
"the first provider",
kmsConf.get(KeyProviderFactory.KEY_PROVIDER_PATH));
}
- keyProvider = providers.get(0);
- long timeOutMillis =
- kmsConf.getLong(KMSConfiguration.KEY_CACHE_TIMEOUT_KEY,
- KMSConfiguration.KEY_CACHE_TIMEOUT_DEFAULT);
- keyProvider = new KMSCacheKeyProvider(keyProvider, timeOutMillis);
+ KeyProvider keyProvider = providers.get(0);
+ if (kmsConf.getBoolean(KMSConfiguration.KEY_CACHE_ENABLE,
+ KMSConfiguration.KEY_CACHE_ENABLE_DEFAULT)) {
+ long keyTimeOutMillis =
+ kmsConf.getLong(KMSConfiguration.KEY_CACHE_TIMEOUT_KEY,
+ KMSConfiguration.KEY_CACHE_TIMEOUT_DEFAULT);
+ long currKeyTimeOutMillis =
+ kmsConf.getLong(KMSConfiguration.CURR_KEY_CACHE_TIMEOUT_KEY,
+ KMSConfiguration.CURR_KEY_CACHE_TIMEOUT_DEFAULT);
+ keyProvider = new CachingKeyProvider(keyProvider, keyTimeOutMillis,
+ currKeyTimeOutMillis);
+ }
+ keyProviderCryptoExtension = KeyProviderCryptoExtension.
+ createKeyProviderCryptoExtension(keyProvider);
+ keyProviderCryptoExtension =
+ new EagerKeyGeneratorKeyProviderCryptoExtension(kmsConf,
+ keyProviderCryptoExtension);
LOG.info("KMS Started");
} catch (Throwable ex) {
@@ -200,6 +226,14 @@ public class KMSWebApp implements Servle
return invalidCallsMeter;
}
+ public static Meter getGenerateEEKCallsMeter() {
+ return generateEEKCallsMeter;
+ }
+
+ public static Meter getDecryptEEKCallsMeter() {
+ return decryptEEKCallsMeter;
+ }
+
public static Meter getUnauthorizedCallsMeter() {
return unauthorizedCallsMeter;
}
@@ -208,7 +242,7 @@ public class KMSWebApp implements Servle
return unauthenticatedCallsMeter;
}
- public static KeyProvider getKeyProvider() {
- return keyProvider;
+ public static KeyProviderCryptoExtension getKeyProvider() {
+ return keyProviderCryptoExtension;
}
}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/site/apt/index.apt.vm Wed Jul 23 01:47:28 2014
@@ -72,22 +72,35 @@ Hadoop Key Management Server (KMS) - Doc
KMS caches keys for short period of time to avoid excessive hits to the
underlying key provider.
- The cache is used with the following 2 methods only, <<<getCurrentKey()>>>
- and <<<getKeyVersion()>>>.
+ The cache is enabled by default (can be disabled by setting the
+ <<<hadoop.kms.cache.enable>>> boolean property to false)
+
+ The cache is used with the following 3 methods only: <<<getCurrentKey()>>>,
+ <<<getKeyVersion()>>> and <<<getMetadata()>>>.
For the <<<getCurrentKey()>>> method, cached entries are kept for a maximum
- of 1000 millisecond regardless the number of times the key is being access
+ of 30000 milliseconds regardless of the number of times the key is accessed
(to avoid stale keys to be considered current).
For the <<<getKeyVersion()>>> method, cached entries are kept with a default
- inactivity timeout of 10000 milliseconds. This time out is configurable via
- the following property in the <<<etc/hadoop/kms-site.xml>>> configuration
- file:
+ inactivity timeout of 600000 milliseconds (10 mins). This time out is
+ configurable via the following property in the <<<etc/hadoop/kms-site.xml>>>
+ configuration file:
+---+
<property>
+ <name>hadoop.kms.cache.enable</name>
+ <value>true</value>
+ </property>
+
+ <property>
<name>hadoop.kms.cache.timeout.ms</name>
- <value>10000</value>
+ <value>600000</value>
+ </property>
+
+ <property>
+ <name>hadoop.kms.current.key.cache.timeout.ms</name>
+ <value>30000</value>
</property>
+---+
@@ -266,6 +279,25 @@ $ keytool -genkey -alias tomcat -keyalg
to provide the key material when creating or rolling a key.
</description>
</property>
+
+ <property>
+ <name>hadoop.kms.acl.GENERATE_EEK</name>
+ <value>*</value>
+ <description>
+ ACL for generateEncryptedKey
+ CryptoExtension operations
+ </description>
+ </property>
+
+ <property>
+ <name>hadoop.kms.acl.DECRYPT_EEK</name>
+ <value>*</value>
+ <description>
+ ACL for decryptEncryptedKey
+ CryptoExtension operations
+ </description>
+ </property>
+</configuration>
+---+
** KMS HTTP REST API
@@ -383,6 +415,70 @@ Content-Type: application/json
}
+---+
+
+*** Generate Encrypted Key for Current KeyVersion
+
+ <REQUEST:>
+
++---+
+GET http://HOST:PORT/kms/v1/key/<key-name>/_eek?eek_op=generate&num_keys=<number-of-keys-to-generate>
++---+
+
+ <RESPONSE:>
+
++---+
+200 OK
+Content-Type: application/json
+[
+ {
+ "versionName" : "encryptionVersionName",
+ "iv" : "<iv>", //base64
+ "encryptedKeyVersion" : {
+ "versionName" : "EEK",
+ "material" : "<material>", //base64
+ }
+ },
+ {
+ "versionName" : "encryptionVersionName",
+ "iv" : "<iv>", //base64
+ "encryptedKeyVersion" : {
+ "versionName" : "EEK",
+ "material" : "<material>", //base64
+ }
+ },
+ ...
+]
++---+
+
+*** Decrypt Encrypted Key
+
+ <REQUEST:>
+
++---+
+POST http://HOST:PORT/kms/v1/keyversion/<version-name>/_eek?eek_op=decrypt
+Content-Type: application/json
+
+{
+ "name" : "<key-name>",
+ "iv" : "<iv>", //base64
+ "material" : "<material>", //base64
+}
+
++---+
+
+ <RESPONSE:>
+
++---+
+200 OK
+Content-Type: application/json
+
+{
+ "name" : "EK",
+ "material" : "<material>", //base64
+}
++---+
+
+
*** Get Key Version
<REQUEST:>
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java Wed Jul 23 01:47:28 2014
@@ -19,6 +19,9 @@ package org.apache.hadoop.crypto.key.kms
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
+import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -36,6 +39,7 @@ import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginContext;
+
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
@@ -267,7 +271,7 @@ public class TestKMS {
}
}
- private void doAs(String user, final PrivilegedExceptionAction<Void> action)
+ private <T> T doAs(String user, final PrivilegedExceptionAction<T> action)
throws Exception {
Set<Principal> principals = new HashSet<Principal>();
principals.add(new KerberosPrincipal(user));
@@ -280,7 +284,7 @@ public class TestKMS {
try {
loginContext.login();
subject = loginContext.getSubject();
- Subject.doAs(subject, action);
+ return Subject.doAs(subject, action);
} finally {
loginContext.logout();
}
@@ -474,6 +478,32 @@ public class TestKMS {
Assert.assertNotNull(kms1[0].getCreated());
Assert.assertTrue(started.before(kms1[0].getCreated()));
+ // test generate and decryption of EEK
+ KeyProvider.KeyVersion kv = kp.getCurrentKey("k1");
+ KeyProviderCryptoExtension kpExt =
+ KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp);
+
+ EncryptedKeyVersion ek1 = kpExt.generateEncryptedKey(kv.getName());
+ Assert.assertEquals(KeyProviderCryptoExtension.EEK,
+ ek1.getEncryptedKeyVersion().getVersionName());
+ Assert.assertNotNull(ek1.getEncryptedKeyVersion().getMaterial());
+ Assert.assertEquals(kv.getMaterial().length,
+ ek1.getEncryptedKeyVersion().getMaterial().length);
+ KeyProvider.KeyVersion k1 = kpExt.decryptEncryptedKey(ek1);
+ Assert.assertEquals(KeyProviderCryptoExtension.EK, k1.getVersionName());
+ KeyProvider.KeyVersion k1a = kpExt.decryptEncryptedKey(ek1);
+ Assert.assertArrayEquals(k1.getMaterial(), k1a.getMaterial());
+ Assert.assertEquals(kv.getMaterial().length, k1.getMaterial().length);
+
+ EncryptedKeyVersion ek2 = kpExt.generateEncryptedKey(kv.getName());
+ KeyProvider.KeyVersion k2 = kpExt.decryptEncryptedKey(ek2);
+ boolean isEq = true;
+ for (int i = 0; isEq && i < ek2.getEncryptedKeyVersion()
+ .getMaterial().length; i++) {
+ isEq = k2.getMaterial()[i] == k1.getMaterial()[i];
+ }
+ Assert.assertFalse(isEq);
+
// deleteKey()
kp.deleteKey("k1");
@@ -565,7 +595,7 @@ public class TestKMS {
@Override
public Void call() throws Exception {
final Configuration conf = new Configuration();
- conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+ conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
URI uri = createKMSUri(getKMSUrl());
final KeyProvider kp = new KMSClientProvider(uri, conf);
@@ -582,7 +612,7 @@ public class TestKMS {
Assert.fail(ex.toString());
}
try {
- kp.createKey("k", new byte[8], new KeyProvider.Options(conf));
+ kp.createKey("k", new byte[16], new KeyProvider.Options(conf));
Assert.fail();
} catch (AuthorizationException ex) {
//NOP
@@ -598,7 +628,7 @@ public class TestKMS {
Assert.fail(ex.toString());
}
try {
- kp.rollNewVersion("k", new byte[8]);
+ kp.rollNewVersion("k", new byte[16]);
Assert.fail();
} catch (AuthorizationException ex) {
//NOP
@@ -690,7 +720,7 @@ public class TestKMS {
@Override
public Void run() throws Exception {
try {
- KeyProvider.KeyVersion kv = kp.createKey("k1", new byte[8],
+ KeyProvider.KeyVersion kv = kp.createKey("k1", new byte[16],
new KeyProvider.Options(conf));
Assert.assertNull(kv.getMaterial());
} catch (Exception ex) {
@@ -717,7 +747,8 @@ public class TestKMS {
@Override
public Void run() throws Exception {
try {
- KeyProvider.KeyVersion kv = kp.rollNewVersion("k1", new byte[8]);
+ KeyProvider.KeyVersion kv =
+ kp.rollNewVersion("k1", new byte[16]);
Assert.assertNull(kv.getMaterial());
} catch (Exception ex) {
Assert.fail(ex.toString());
@@ -726,12 +757,46 @@ public class TestKMS {
}
});
- doAs("GET", new PrivilegedExceptionAction<Void>() {
+ final KeyVersion currKv =
+ doAs("GET", new PrivilegedExceptionAction<KeyVersion>() {
@Override
- public Void run() throws Exception {
+ public KeyVersion run() throws Exception {
try {
kp.getKeyVersion("k1@0");
- kp.getCurrentKey("k1");
+ KeyVersion kv = kp.getCurrentKey("k1");
+ return kv;
+ } catch (Exception ex) {
+ Assert.fail(ex.toString());
+ }
+ return null;
+ }
+ });
+
+ final EncryptedKeyVersion encKv =
+ doAs("GENERATE_EEK",
+ new PrivilegedExceptionAction<EncryptedKeyVersion>() {
+ @Override
+ public EncryptedKeyVersion run() throws Exception {
+ try {
+ KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension.
+ createKeyProviderCryptoExtension(kp);
+ EncryptedKeyVersion ek1 =
+ kpCE.generateEncryptedKey(currKv.getName());
+ return ek1;
+ } catch (Exception ex) {
+ Assert.fail(ex.toString());
+ }
+ return null;
+ }
+ });
+
+ doAs("DECRYPT_EEK", new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ try {
+ KeyProviderCryptoExtension kpCE = KeyProviderCryptoExtension.
+ createKeyProviderCryptoExtension(kp);
+ kpCE.decryptEncryptedKey(encKv);
} catch (Exception ex) {
Assert.fail(ex.toString());
}
@@ -817,7 +882,7 @@ public class TestKMS {
@Override
public Void call() throws Exception {
final Configuration conf = new Configuration();
- conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 64);
+ conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
URI uri = createKMSUri(getKMSUrl());
final KeyProvider kp = new KMSClientProvider(uri, conf);
@@ -889,6 +954,30 @@ public class TestKMS {
Assert.assertTrue("Caught unexpected exception" + e.toString(), false);
}
+ caughtTimeout = false;
+ try {
+ KeyProvider kp = new KMSClientProvider(uri, conf);
+ KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp)
+ .generateEncryptedKey("a");
+ } catch (SocketTimeoutException e) {
+ caughtTimeout = true;
+ } catch (IOException e) {
+ Assert.assertTrue("Caught unexpected exception" + e.toString(), false);
+ }
+
+ caughtTimeout = false;
+ try {
+ KeyProvider kp = new KMSClientProvider(uri, conf);
+ KeyProviderCryptoExtension.createKeyProviderCryptoExtension(kp)
+ .decryptEncryptedKey(
+ new KMSClientProvider.KMSEncryptedKeyVersion("a",
+ "a", new byte[] {1, 2}, "EEK", new byte[] {1, 2}));
+ } catch (SocketTimeoutException e) {
+ caughtTimeout = true;
+ } catch (IOException e) {
+ Assert.assertTrue("Caught unexpected exception" + e.toString(), false);
+ }
+
Assert.assertTrue(caughtTimeout);
sock.close();
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java Wed Jul 23 01:47:28 2014
@@ -71,7 +71,16 @@ public class NfsExports {
private static final Pattern CIDR_FORMAT_LONG =
Pattern.compile(SLASH_FORMAT_LONG);
-
+
+ // Hostnames are composed of series of 'labels' concatenated with dots.
+ // Labels can be between 1-63 characters long, and can only take
+ // letters, digits & hyphens. They cannot start and end with hyphens. For
+ // more details, refer RFC-1123 & http://en.wikipedia.org/wiki/Hostname
+ private static final String LABEL_FORMAT =
+ "[a-zA-Z0-9]([a-zA-Z0-9\\-]{0,61}[a-zA-Z0-9])?";
+ private static final Pattern HOSTNAME_FORMAT =
+ Pattern.compile("^(" + LABEL_FORMAT + "\\.)*" + LABEL_FORMAT + "$");
+
static class AccessCacheEntry implements LightWeightCache.Entry{
private final String hostAddr;
private AccessPrivilege access;
@@ -381,10 +390,14 @@ public class NfsExports {
LOG.debug("Using Regex match for '" + host + "' and " + privilege);
}
return new RegexMatch(privilege, host);
+ } else if (HOSTNAME_FORMAT.matcher(host).matches()) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Using exact match for '" + host + "' and " + privilege);
+ }
+ return new ExactMatch(privilege, host);
+ } else {
+ throw new IllegalArgumentException("Invalid hostname provided '" + host
+ + "'");
}
- if (LOG.isDebugEnabled()) {
- LOG.debug("Using exact match for '" + host + "' and " + privilege);
- }
- return new ExactMatch(privilege, host);
}
-}
\ No newline at end of file
+}
Modified: hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java?rev=1612742&r1=1612741&r2=1612742&view=diff
==============================================================================
--- hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java (original)
+++ hadoop/common/branches/MR-2841/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsExports.java Wed Jul 23 01:47:28 2014
@@ -194,4 +194,16 @@ public class TestNfsExports {
} while ((System.nanoTime() - startNanos) / NanosPerMillis < 5000);
Assert.assertEquals(AccessPrivilege.NONE, ap);
}
+
+ @Test(expected=IllegalArgumentException.class)
+ public void testInvalidHost() {
+ NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+ "foo#bar");
+ }
+
+ @Test(expected=IllegalArgumentException.class)
+ public void testInvalidSeparator() {
+ NfsExports matcher = new NfsExports(CacheSize, ExpirationPeriod,
+ "foo ro : bar rw");
+ }
}