You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2014/08/20 03:34:41 UTC
svn commit: r1619019 [5/6] - in
/hadoop/common/branches/YARN-1051/hadoop-common-project: hadoop-auth/
hadoop-auth/dev-support/
hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/
hadoop-auth/src/main/java/org/apache/hadoop/secur...
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java Wed Aug 20 01:34:29 2014
@@ -100,9 +100,9 @@ public class TestKeyProviderFactory {
static void checkSpecificProvider(Configuration conf,
String ourUrl) throws Exception {
KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
- byte[] key1 = new byte[32];
- byte[] key2 = new byte[32];
- byte[] key3 = new byte[32];
+ byte[] key1 = new byte[16];
+ byte[] key2 = new byte[16];
+ byte[] key3 = new byte[16];
for(int i =0; i < key1.length; ++i) {
key1[i] = (byte) i;
key2[i] = (byte) (i * 2);
@@ -146,7 +146,7 @@ public class TestKeyProviderFactory {
KeyProvider.options(conf).setBitLength(8));
assertTrue("should throw", false);
} catch (IOException e) {
- assertEquals("Wrong key length. Required 8, but got 256", e.getMessage());
+ assertEquals("Wrong key length. Required 8, but got 128", e.getMessage());
}
provider.createKey("key4", new byte[]{1},
KeyProvider.options(conf).setBitLength(8));
@@ -162,7 +162,7 @@ public class TestKeyProviderFactory {
provider.rollNewVersion("key4", key1);
assertTrue("should throw", false);
} catch (IOException e) {
- assertEquals("Wrong key length. Required 8, but got 256", e.getMessage());
+ assertEquals("Wrong key length. Required 8, but got 128", e.getMessage());
}
try {
provider.rollNewVersion("no-such-key", key1);
@@ -220,15 +220,80 @@ public class TestKeyProviderFactory {
assertTrue(s.getPermission().toString().equals("rwx------"));
assertTrue(file + " should exist", file.isFile());
+ // Corrupt file and Check if JKS can reload from _OLD file
+ File oldFile = new File(file.getPath() + "_OLD");
+ file.renameTo(oldFile);
+ file.delete();
+ file.createNewFile();
+ assertTrue(oldFile.exists());
+ KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
+ assertTrue(file.exists());
+ assertTrue(oldFile + "should be deleted", !oldFile.exists());
+ verifyAfterReload(file, provider);
+ assertTrue(!oldFile.exists());
+
+ // _NEW and current file should not exist together
+ File newFile = new File(file.getPath() + "_NEW");
+ newFile.createNewFile();
+ try {
+ provider = KeyProviderFactory.getProviders(conf).get(0);
+ Assert.fail("_NEW and current file should not exist together !!");
+ } catch (Exception e) {
+ // Ignore
+ } finally {
+ if (newFile.exists()) {
+ newFile.delete();
+ }
+ }
+
+ // Load from _NEW file
+ file.renameTo(newFile);
+ file.delete();
+ try {
+ provider = KeyProviderFactory.getProviders(conf).get(0);
+ Assert.assertFalse(newFile.exists());
+ Assert.assertFalse(oldFile.exists());
+ } catch (Exception e) {
+ Assert.fail("JKS should load from _NEW file !!");
+ // Ignore
+ }
+ verifyAfterReload(file, provider);
+
+ // _NEW exists but corrupt.. must load from _OLD
+ newFile.createNewFile();
+ file.renameTo(oldFile);
+ file.delete();
+ try {
+ provider = KeyProviderFactory.getProviders(conf).get(0);
+ Assert.assertFalse(newFile.exists());
+ Assert.assertFalse(oldFile.exists());
+ } catch (Exception e) {
+ Assert.fail("JKS should load from _OLD file !!");
+ // Ignore
+ } finally {
+ if (newFile.exists()) {
+ newFile.delete();
+ }
+ }
+ verifyAfterReload(file, provider);
+
// check permission retention after explicit change
fs.setPermission(path, new FsPermission("777"));
checkPermissionRetention(conf, ourUrl, path);
}
+ private void verifyAfterReload(File file, KeyProvider provider)
+ throws IOException {
+ List<String> existingKeys = provider.getKeys();
+ assertTrue(existingKeys.contains("key4"));
+ assertTrue(existingKeys.contains("key3"));
+ assertTrue(file.exists());
+ }
+
public void checkPermissionRetention(Configuration conf, String ourUrl, Path path) throws Exception {
KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
// let's add a new key and flush and check that permissions are still set to 777
- byte[] key = new byte[32];
+ byte[] key = new byte[16];
for(int i =0; i < key.length; ++i) {
key[i] = (byte) i;
}
@@ -261,7 +326,7 @@ public class TestKeyProviderFactory {
conf.set(JavaKeyStoreProvider.KEYSTORE_PASSWORD_FILE_KEY,
"javakeystoreprovider.password");
KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
- provider.createKey("key3", new byte[32], KeyProvider.options(conf));
+ provider.createKey("key3", new byte[16], KeyProvider.options(conf));
provider.flush();
} catch (Exception ex) {
Assert.fail("could not create keystore with password file");
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Wed Aug 20 01:34:29 2014
@@ -73,7 +73,7 @@ public class TestKeyShell {
private void deleteKey(KeyShell ks, String keyName) throws Exception {
int rc;
outContent.reset();
- final String[] delArgs = {"delete", keyName, "--provider", jceksProvider};
+ final String[] delArgs = {"delete", keyName, "-provider", jceksProvider};
rc = ks.run(delArgs);
assertEquals(0, rc);
assertTrue(outContent.toString().contains(keyName + " has been " +
@@ -90,8 +90,8 @@ public class TestKeyShell {
private String listKeys(KeyShell ks, boolean wantMetadata) throws Exception {
int rc;
outContent.reset();
- final String[] listArgs = {"list", "--provider", jceksProvider };
- final String[] listArgsM = {"list", "--metadata", "--provider", jceksProvider };
+ final String[] listArgs = {"list", "-provider", jceksProvider };
+ final String[] listArgsM = {"list", "-metadata", "-provider", jceksProvider };
rc = ks.run(wantMetadata ? listArgsM : listArgs);
assertEquals(0, rc);
return outContent.toString();
@@ -106,11 +106,11 @@ public class TestKeyShell {
ks.setConf(new Configuration());
outContent.reset();
- final String[] args1 = {"create", keyName, "--provider", jceksProvider};
+ final String[] args1 = {"create", keyName, "-provider", jceksProvider};
rc = ks.run(args1);
assertEquals(0, rc);
assertTrue(outContent.toString().contains(keyName + " has been " +
- "successfully created."));
+ "successfully created"));
String listOut = listKeys(ks, false);
assertTrue(listOut.contains(keyName));
@@ -121,7 +121,7 @@ public class TestKeyShell {
assertTrue(listOut.contains("created"));
outContent.reset();
- final String[] args2 = {"roll", keyName, "--provider", jceksProvider};
+ final String[] args2 = {"roll", keyName, "-provider", jceksProvider};
rc = ks.run(args2);
assertEquals(0, rc);
assertTrue(outContent.toString().contains("key1 has been successfully " +
@@ -137,15 +137,15 @@ public class TestKeyShell {
@Test
public void testKeySuccessfulCreationWithDescription() throws Exception {
outContent.reset();
- final String[] args1 = {"create", "key1", "--provider", jceksProvider,
- "--description", "someDescription"};
+ final String[] args1 = {"create", "key1", "-provider", jceksProvider,
+ "-description", "someDescription"};
int rc = 0;
KeyShell ks = new KeyShell();
ks.setConf(new Configuration());
rc = ks.run(args1);
assertEquals(0, rc);
assertTrue(outContent.toString().contains("key1 has been successfully " +
- "created."));
+ "created"));
String listOut = listKeys(ks, true);
assertTrue(listOut.contains("description"));
@@ -154,7 +154,7 @@ public class TestKeyShell {
@Test
public void testInvalidKeySize() throws Exception {
- final String[] args1 = {"create", "key1", "--size", "56", "--provider",
+ final String[] args1 = {"create", "key1", "-size", "56", "-provider",
jceksProvider};
int rc = 0;
@@ -167,7 +167,7 @@ public class TestKeyShell {
@Test
public void testInvalidCipher() throws Exception {
- final String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider",
+ final String[] args1 = {"create", "key1", "-cipher", "LJM", "-provider",
jceksProvider};
int rc = 0;
@@ -180,7 +180,7 @@ public class TestKeyShell {
@Test
public void testInvalidProvider() throws Exception {
- final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
+ final String[] args1 = {"create", "key1", "-cipher", "AES", "-provider",
"sdff://file/tmp/keystore.jceks"};
int rc = 0;
@@ -194,7 +194,7 @@ public class TestKeyShell {
@Test
public void testTransientProviderWarning() throws Exception {
- final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
+ final String[] args1 = {"create", "key1", "-cipher", "AES", "-provider",
"user:///"};
int rc = 0;
@@ -224,8 +224,8 @@ public class TestKeyShell {
@Test
public void testFullCipher() throws Exception {
final String keyName = "key1";
- final String[] args1 = {"create", keyName, "--cipher", "AES/CBC/pkcs5Padding",
- "--provider", jceksProvider};
+ final String[] args1 = {"create", keyName, "-cipher", "AES/CBC/pkcs5Padding",
+ "-provider", jceksProvider};
int rc = 0;
KeyShell ks = new KeyShell();
@@ -233,7 +233,7 @@ public class TestKeyShell {
rc = ks.run(args1);
assertEquals(0, rc);
assertTrue(outContent.toString().contains(keyName + " has been " +
- "successfully " + "created."));
+ "successfully created"));
deleteKey(ks, keyName);
}
@@ -245,12 +245,12 @@ public class TestKeyShell {
ks.setConf(new Configuration());
/* Simple creation test */
- final String[] args1 = {"create", "keyattr1", "--provider", jceksProvider,
- "--attr", "foo=bar"};
+ final String[] args1 = {"create", "keyattr1", "-provider", jceksProvider,
+ "-attr", "foo=bar"};
rc = ks.run(args1);
assertEquals(0, rc);
assertTrue(outContent.toString().contains("keyattr1 has been " +
- "successfully " + "created."));
+ "successfully created"));
/* ...and list to see that we have the attr */
String listOut = listKeys(ks, true);
@@ -259,8 +259,8 @@ public class TestKeyShell {
/* Negative tests: no attribute */
outContent.reset();
- final String[] args2 = {"create", "keyattr2", "--provider", jceksProvider,
- "--attr", "=bar"};
+ final String[] args2 = {"create", "keyattr2", "-provider", jceksProvider,
+ "-attr", "=bar"};
rc = ks.run(args2);
assertEquals(1, rc);
@@ -288,10 +288,10 @@ public class TestKeyShell {
/* Test several attrs together... */
outContent.reset();
- final String[] args3 = {"create", "keyattr3", "--provider", jceksProvider,
- "--attr", "foo = bar",
- "--attr", " glarch =baz ",
- "--attr", "abc=def"};
+ final String[] args3 = {"create", "keyattr3", "-provider", jceksProvider,
+ "-attr", "foo = bar",
+ "-attr", " glarch =baz ",
+ "-attr", "abc=def"};
rc = ks.run(args3);
assertEquals(0, rc);
@@ -304,9 +304,9 @@ public class TestKeyShell {
/* Negative test - repeated attributes should fail */
outContent.reset();
- final String[] args4 = {"create", "keyattr4", "--provider", jceksProvider,
- "--attr", "foo=bar",
- "--attr", "foo=glarch"};
+ final String[] args4 = {"create", "keyattr4", "-provider", jceksProvider,
+ "-attr", "foo=bar",
+ "-attr", "foo=glarch"};
rc = ks.run(args4);
assertEquals(1, rc);
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java Wed Aug 20 01:34:29 2014
@@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
@@ -201,6 +202,8 @@ public class TestHarFileSystem {
public void removeXAttr(Path path, String name) throws IOException;
public AclStatus getAclStatus(Path path) throws IOException;
+
+ public void access(Path path, FsAction mode) throws IOException;
}
@Test
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java Wed Aug 20 01:34:29 2014
@@ -414,7 +414,7 @@ public class TestHttpServer extends Http
assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
+ servlet, user));
}
- assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, getHttpStatusCode(
+ assertEquals(HttpURLConnection.HTTP_FORBIDDEN, getHttpStatusCode(
serverURL + servlet, "userE"));
}
myServer.stop();
@@ -474,7 +474,7 @@ public class TestHttpServer extends Http
response = Mockito.mock(HttpServletResponse.class);
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true);
Assert.assertFalse(HttpServer2.hasAdministratorAccess(context, request, response));
- Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+ Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString());
//authorization ON & user NOT NULL & ACLs NULL
response = Mockito.mock(HttpServletResponse.class);
@@ -487,7 +487,7 @@ public class TestHttpServer extends Http
Mockito.when(acls.isUserAllowed(Mockito.<UserGroupInformation>any())).thenReturn(false);
Mockito.when(context.getAttribute(HttpServer2.ADMINS_ACL)).thenReturn(acls);
Assert.assertFalse(HttpServer2.hasAdministratorAccess(context, request, response));
- Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), Mockito.anyString());
+ Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString());
//authorization ON & user NOT NULL & ACLs NOT NULL & user in in ACLs
response = Mockito.mock(HttpServletResponse.class);
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java Wed Aug 20 01:34:29 2014
@@ -583,14 +583,14 @@ public class TestRPC {
}
MetricsRecordBuilder rb = getMetrics(server.rpcMetrics.name());
if (expectFailure) {
- assertCounter("RpcAuthorizationFailures", 1, rb);
+ assertCounter("RpcAuthorizationFailures", 1L, rb);
} else {
- assertCounter("RpcAuthorizationSuccesses", 1, rb);
+ assertCounter("RpcAuthorizationSuccesses", 1L, rb);
}
//since we don't have authentication turned ON, we should see
// 0 for the authentication successes and 0 for failure
- assertCounter("RpcAuthenticationFailures", 0, rb);
- assertCounter("RpcAuthenticationSuccesses", 0, rb);
+ assertCounter("RpcAuthenticationFailures", 0L, rb);
+ assertCounter("RpcAuthenticationSuccesses", 0L, rb);
}
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestLdapGroupsMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestLdapGroupsMapping.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestLdapGroupsMapping.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestLdapGroupsMapping.java Wed Aug 20 01:34:29 2014
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.security;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;
import java.io.File;
@@ -38,6 +40,9 @@ import javax.naming.directory.SearchCont
import javax.naming.directory.SearchResult;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -154,4 +159,57 @@ public class TestLdapGroupsMapping {
Assert.assertEquals("hadoop",
mapping.extractPassword(secretFile.getPath()));
}
+
+ @Test
+ public void testConfGetPassword() throws Exception {
+ File testDir = new File(System.getProperty("test.build.data",
+ "target/test-dir"));
+ Configuration conf = new Configuration();
+ final String ourUrl =
+ JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";
+
+ File file = new File(testDir, "test.jks");
+ file.delete();
+ conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
+
+ CredentialProvider provider =
+ CredentialProviderFactory.getProviders(conf).get(0);
+ char[] bindpass = {'b', 'i', 'n', 'd', 'p', 'a', 's', 's'};
+ char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};
+
+ // ensure that we get nulls when the key isn't there
+ assertEquals(null, provider.getCredentialEntry(
+ LdapGroupsMapping.BIND_PASSWORD_KEY));
+ assertEquals(null, provider.getCredentialEntry
+ (LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY));
+
+ // create new aliases
+ try {
+ provider.createCredentialEntry(
+ LdapGroupsMapping.BIND_PASSWORD_KEY, bindpass);
+
+ provider.createCredentialEntry(
+ LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY, storepass);
+ provider.flush();
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw e;
+ }
+ // make sure we get back the right key
+ assertArrayEquals(bindpass, provider.getCredentialEntry(
+ LdapGroupsMapping.BIND_PASSWORD_KEY).getCredential());
+ assertArrayEquals(storepass, provider.getCredentialEntry(
+ LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY).getCredential());
+
+ LdapGroupsMapping mapping = new LdapGroupsMapping();
+ Assert.assertEquals("bindpass",
+ mapping.getPassword(conf, LdapGroupsMapping.BIND_PASSWORD_KEY, ""));
+ Assert.assertEquals("storepass",
+ mapping.getPassword(conf, LdapGroupsMapping.LDAP_KEYSTORE_PASSWORD_KEY,
+ ""));
+ // let's make sure that a password that doesn't exist returns an
+ // empty string as currently expected and used to trigger a call to
+ // extract password
+ Assert.assertEquals("", mapping.getPassword(conf,"invalid-alias", ""));
+ }
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java Wed Aug 20 01:34:29 2014
@@ -17,16 +17,18 @@
*/
package org.apache.hadoop.security.alias;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.alias.CredentialShell.PasswordReader;
import org.junit.Before;
import org.junit.Test;
@@ -45,7 +47,7 @@ public class TestCredShell {
@Test
public void testCredentialSuccessfulLifecycle() throws Exception {
outContent.reset();
- String[] args1 = {"create", "credential1", "--value", "p@ssw0rd", "--provider",
+ String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
"jceks://file" + tmpDir + "/credstore.jceks"};
int rc = 0;
CredentialShell cs = new CredentialShell();
@@ -56,14 +58,14 @@ public class TestCredShell {
"created."));
outContent.reset();
- String[] args2 = {"list", "--provider",
+ String[] args2 = {"list", "-provider",
"jceks://file" + tmpDir + "/credstore.jceks"};
rc = cs.run(args2);
assertEquals(0, rc);
assertTrue(outContent.toString().contains("credential1"));
outContent.reset();
- String[] args4 = {"delete", "credential1", "--provider",
+ String[] args4 = {"delete", "credential1", "-provider",
"jceks://file" + tmpDir + "/credstore.jceks"};
rc = cs.run(args4);
assertEquals(0, rc);
@@ -71,7 +73,7 @@ public class TestCredShell {
"deleted."));
outContent.reset();
- String[] args5 = {"list", "--provider",
+ String[] args5 = {"list", "-provider",
"jceks://file" + tmpDir + "/credstore.jceks"};
rc = cs.run(args5);
assertEquals(0, rc);
@@ -80,21 +82,21 @@ public class TestCredShell {
@Test
public void testInvalidProvider() throws Exception {
- String[] args1 = {"create", "credential1", "--value", "p@ssw0rd", "--provider",
+ String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
"sdff://file/tmp/credstore.jceks"};
int rc = 0;
CredentialShell cs = new CredentialShell();
cs.setConf(new Configuration());
rc = cs.run(args1);
- assertEquals(-1, rc);
+ assertEquals(1, rc);
assertTrue(outContent.toString().contains("There are no valid " +
"CredentialProviders configured."));
}
@Test
public void testTransientProviderWarning() throws Exception {
- String[] args1 = {"create", "credential1", "--value", "p@ssw0rd", "--provider",
+ String[] args1 = {"create", "credential1", "-value", "p@ssw0rd", "-provider",
"user:///"};
int rc = 0;
@@ -105,7 +107,7 @@ public class TestCredShell {
assertTrue(outContent.toString().contains("WARNING: you are modifying a " +
"transient provider."));
- String[] args2 = {"delete", "credential1", "--provider", "user:///"};
+ String[] args2 = {"delete", "credential1", "-provider", "user:///"};
rc = cs.run(args2);
assertEquals(outContent.toString(), 0, rc);
assertTrue(outContent.toString().contains("credential1 has been successfully " +
@@ -122,14 +124,14 @@ public class TestCredShell {
config.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, "user:///");
cs.setConf(config);
rc = cs.run(args1);
- assertEquals(-1, rc);
+ assertEquals(1, rc);
assertTrue(outContent.toString().contains("There are no valid " +
"CredentialProviders configured."));
}
@Test
public void testPromptForCredentialWithEmptyPasswd() throws Exception {
- String[] args1 = {"create", "credential1", "--provider",
+ String[] args1 = {"create", "credential1", "-provider",
"jceks://file" + tmpDir + "/credstore.jceks"};
ArrayList<String> passwords = new ArrayList<String>();
passwords.add(null);
@@ -139,13 +141,13 @@ public class TestCredShell {
shell.setConf(new Configuration());
shell.setPasswordReader(new MockPasswordReader(passwords));
rc = shell.run(args1);
- assertEquals(outContent.toString(), -1, rc);
+ assertEquals(outContent.toString(), 1, rc);
assertTrue(outContent.toString().contains("Passwords don't match"));
}
@Test
public void testPromptForCredential() throws Exception {
- String[] args1 = {"create", "credential1", "--provider",
+ String[] args1 = {"create", "credential1", "-provider",
"jceks://file" + tmpDir + "/credstore.jceks"};
ArrayList<String> passwords = new ArrayList<String>();
passwords.add("p@ssw0rd");
@@ -159,7 +161,7 @@ public class TestCredShell {
assertTrue(outContent.toString().contains("credential1 has been successfully " +
"created."));
- String[] args2 = {"delete", "credential1", "--provider",
+ String[] args2 = {"delete", "credential1", "-provider",
"jceks://file" + tmpDir + "/credstore.jceks"};
rc = shell.run(args2);
assertEquals(0, rc);
@@ -186,4 +188,21 @@ public class TestCredShell {
System.out.println(message);
}
}
+
+ @Test
+ public void testEmptyArgList() throws Exception {
+ CredentialShell shell = new CredentialShell();
+ shell.setConf(new Configuration());
+ assertEquals(1, shell.init(new String[0]));
+ }
+
+ @Test
+ public void testCommandHelpExitsNormally() throws Exception {
+ for (String cmd : Arrays.asList("create", "list", "delete")) {
+ CredentialShell shell = new CredentialShell();
+ shell.setConf(new Configuration());
+ assertEquals("Expected help argument on " + cmd + " to return 0",
+ 0, shell.init(new String[] {cmd, "-help"}));
+ }
+ }
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestServiceAuthorization.java Wed Aug 20 01:34:29 2014
@@ -18,16 +18,22 @@
package org.apache.hadoop.security.authorize;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ipc.TestRPC.TestProtocol;
+import org.apache.hadoop.security.UserGroupInformation;
import org.junit.Test;
public class TestServiceAuthorization {
private static final String ACL_CONFIG = "test.protocol.acl";
private static final String ACL_CONFIG1 = "test.protocol1.acl";
+ private static final String ADDRESS = "0.0.0.0";
public interface TestProtocol1 extends TestProtocol {};
@@ -64,4 +70,115 @@ public class TestServiceAuthorization {
acl = serviceAuthorizationManager.getProtocolsAcls(TestProtocol1.class);
assertEquals("user2 group2", acl.getAclString());
}
+
+ @Test
+ public void testBlockedAcl() throws UnknownHostException {
+ UserGroupInformation drwho =
+ UserGroupInformation.createUserForTesting("drwho@EXAMPLE.COM",
+ new String[] { "group1", "group2" });
+
+ ServiceAuthorizationManager serviceAuthorizationManager =
+ new ServiceAuthorizationManager();
+ Configuration conf = new Configuration ();
+
+ //test without setting a blocked acl
+ conf.set(ACL_CONFIG, "user1 group1");
+ serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+ InetAddress.getByName(ADDRESS));
+ } catch (AuthorizationException e) {
+ fail();
+ }
+ //now set a blocked acl with another user and another group
+ conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "drwho2 group3");
+ serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+ InetAddress.getByName(ADDRESS));
+ } catch (AuthorizationException e) {
+ fail();
+ }
+ //now set a blocked acl with the user and another group
+ conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "drwho group3");
+ serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+ InetAddress.getByName(ADDRESS));
+ fail();
+ } catch (AuthorizationException e) {
+
+ }
+ //now set a blocked acl with another user and another group
+ conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "drwho2 group3");
+ serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+ InetAddress.getByName(ADDRESS));
+ } catch (AuthorizationException e) {
+ fail();
+ }
+ //now set a blocked acl with another user and group that the user belongs to
+ conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "drwho2 group2");
+ serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+ InetAddress.getByName(ADDRESS));
+ fail();
+ } catch (AuthorizationException e) {
+ //expects Exception
+ }
+ //reset blocked acl so that there is no blocked ACL
+ conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "");
+ serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+ InetAddress.getByName(ADDRESS));
+ } catch (AuthorizationException e) {
+ fail();
+ }
+ }
+
+ @Test
+ public void testDefaultBlockedAcl() throws UnknownHostException {
+ UserGroupInformation drwho =
+ UserGroupInformation.createUserForTesting("drwho@EXAMPLE.COM",
+ new String[] { "group1", "group2" });
+
+ ServiceAuthorizationManager serviceAuthorizationManager =
+ new ServiceAuthorizationManager();
+ Configuration conf = new Configuration ();
+
+ //test without setting a default blocked acl
+ serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol1.class, conf,
+ InetAddress.getByName(ADDRESS));
+ } catch (AuthorizationException e) {
+ fail();
+ }
+
+ //set a restrictive default blocked acl and an non-restricting blocked acl for TestProtocol
+ conf.set(
+ CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_BLOCKED_ACL,
+ "user2 group2");
+ conf.set(ACL_CONFIG + ServiceAuthorizationManager.BLOCKED, "user2");
+ serviceAuthorizationManager.refresh(conf, new TestPolicyProvider());
+ //drwho is authorized to access TestProtocol
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol.class, conf,
+ InetAddress.getByName(ADDRESS));
+ } catch (AuthorizationException e) {
+ fail();
+ }
+ //drwho is not authorized to access TestProtocol1 because it uses the default blocked acl.
+ try {
+ serviceAuthorizationManager.authorize(drwho, TestProtocol1.class, conf,
+ InetAddress.getByName(ADDRESS));
+ fail();
+ } catch (AuthorizationException e) {
+ //expects Exception
+ }
+ }
+
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/KeyStoreTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/KeyStoreTestUtil.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/KeyStoreTestUtil.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/KeyStoreTestUtil.java Wed Aug 20 01:34:29 2014
@@ -19,6 +19,10 @@
package org.apache.hadoop.security.ssl;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
+
import sun.security.x509.AlgorithmId;
import sun.security.x509.CertificateAlgorithmId;
import sun.security.x509.CertificateIssuerName;
@@ -382,4 +386,41 @@ public class KeyStoreTestUtil {
writer.close();
}
}
+
+ public static void provisionPasswordsToCredentialProvider() throws Exception {
+ File testDir = new File(System.getProperty("test.build.data",
+ "target/test-dir"));
+
+ Configuration conf = new Configuration();
+ final String ourUrl =
+ JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";
+
+ File file = new File(testDir, "test.jks");
+ file.delete();
+ conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
+
+ CredentialProvider provider =
+ CredentialProviderFactory.getProviders(conf).get(0);
+ char[] keypass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
+ char[] storepass = {'s', 't', 'o', 'r', 'e', 'p', 'a', 's', 's'};
+
+ // create new aliases
+ try {
+ provider.createCredentialEntry(
+ FileBasedKeyStoresFactory.resolvePropertyName(SSLFactory.Mode.SERVER,
+ FileBasedKeyStoresFactory.SSL_KEYSTORE_PASSWORD_TPL_KEY),
+ storepass);
+
+ provider.createCredentialEntry(
+ FileBasedKeyStoresFactory.resolvePropertyName(SSLFactory.Mode.SERVER,
+ FileBasedKeyStoresFactory.SSL_KEYSTORE_KEYPASSWORD_TPL_KEY),
+ keypass);
+
+ // write out so that it can be found in checks
+ provider.flush();
+ } catch (Exception e) {
+ e.printStackTrace();
+ throw e;
+ }
+ }
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/ssl/TestSSLFactory.java Wed Aug 20 01:34:29 2014
@@ -17,8 +17,14 @@
*/
package org.apache.hadoop.security.ssl;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -211,6 +217,13 @@ public class TestSSLFactory {
"password", "password", null);
}
+ @Test
+ public void testServerCredProviderPasswords() throws Exception {
+ KeyStoreTestUtil.provisionPasswordsToCredentialProvider();
+ checkSSLFactoryInitWithPasswords(SSLFactory.Mode.SERVER,
+ "storepass", "keypass", null, null, true);
+ }
+
/**
* Checks that SSLFactory initialization is successful with the given
* arguments. This is a helper method for writing test cases that cover
@@ -218,7 +231,7 @@ public class TestSSLFactory {
* It takes care of bootstrapping a keystore, a truststore, and SSL client or
* server configuration. Then, it initializes an SSLFactory. If no exception
* is thrown, then initialization was successful.
- *
+ *
* @param mode SSLFactory.Mode mode to test
* @param password String store password to set on keystore
* @param keyPassword String key password to set on keystore
@@ -231,6 +244,34 @@ public class TestSSLFactory {
private void checkSSLFactoryInitWithPasswords(SSLFactory.Mode mode,
String password, String keyPassword, String confPassword,
String confKeyPassword) throws Exception {
+ checkSSLFactoryInitWithPasswords(mode, password, keyPassword,
+ confPassword, confKeyPassword, false);
+ }
+
+ /**
+ * Checks that SSLFactory initialization is successful with the given
+ * arguments. This is a helper method for writing test cases that cover
+ * different combinations of settings for the store password and key password.
+ * It takes care of bootstrapping a keystore, a truststore, and SSL client or
+ * server configuration. Then, it initializes an SSLFactory. If no exception
+ * is thrown, then initialization was successful.
+ *
+ * @param mode SSLFactory.Mode mode to test
+ * @param password String store password to set on keystore
+ * @param keyPassword String key password to set on keystore
+ * @param confPassword String store password to set in SSL config file, or null
+ * to avoid setting in SSL config file
+ * @param confKeyPassword String key password to set in SSL config file, or
+ * null to avoid setting in SSL config file
+ * @param useCredProvider boolean to indicate whether passwords should be set
+ * into the config or not. When set to true nulls are set and aliases are
+ * expected to be resolved through credential provider API through the
+ * Configuration.getPassword method
+ * @throws Exception for any error
+ */
+ private void checkSSLFactoryInitWithPasswords(SSLFactory.Mode mode,
+ String password, String keyPassword, String confPassword,
+ String confKeyPassword, boolean useCredProvider) throws Exception {
String keystore = new File(KEYSTORES_DIR, "keystore.jks").getAbsolutePath();
String truststore = new File(KEYSTORES_DIR, "truststore.jks")
.getAbsolutePath();
@@ -249,10 +290,25 @@ public class TestSSLFactory {
// Create SSL configuration file, for either server or client.
final String sslConfFileName;
final Configuration sslConf;
+
+ // if the passwords are provisioned in a cred provider then don't set them
+ // in the configuration; instead expect them to be resolved through the
+ // provider
+ if (useCredProvider) {
+ confPassword = null;
+ confKeyPassword = null;
+ }
if (mode == SSLFactory.Mode.SERVER) {
sslConfFileName = "ssl-server.xml";
sslConf = KeyStoreTestUtil.createServerSSLConfig(keystore, confPassword,
confKeyPassword, truststore);
+ if (useCredProvider) {
+ File testDir = new File(System.getProperty("test.build.data",
+ "target/test-dir"));
+ final String ourUrl =
+ JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";
+ sslConf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
+ }
} else {
sslConfFileName = "ssl-client.xml";
sslConf = KeyStoreTestUtil.createClientSSLConfig(keystore, confPassword,
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java Wed Aug 20 01:34:29 2014
@@ -19,6 +19,9 @@ package org.apache.hadoop.util;
import java.nio.ByteBuffer;
import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+import com.google.common.base.Stopwatch;
import org.apache.hadoop.fs.ChecksumException;
import org.junit.Test;
@@ -53,68 +56,113 @@ public class TestDataChecksum {
}
}
}
-
- private void doBulkTest(DataChecksum checksum, int dataLength,
- boolean useDirect) throws Exception {
- System.err.println("Testing bulk checksums of length " +
- dataLength + " with " +
- (useDirect ? "direct" : "array-backed") + " buffers");
- int numSums = (dataLength - 1)/checksum.getBytesPerChecksum() + 1;
- int sumsLength = numSums * checksum.getChecksumSize();
-
- byte data[] = new byte[dataLength +
- DATA_OFFSET_IN_BUFFER +
- DATA_TRAILER_IN_BUFFER];
- new Random().nextBytes(data);
- ByteBuffer dataBuf = ByteBuffer.wrap(
+
+ private static class Harness {
+ final DataChecksum checksum;
+ final int dataLength, sumsLength, numSums;
+ ByteBuffer dataBuf, checksumBuf;
+
+ Harness(DataChecksum checksum, int dataLength, boolean useDirect) {
+ this.checksum = checksum;
+ this.dataLength = dataLength;
+
+ numSums = (dataLength - 1)/checksum.getBytesPerChecksum() + 1;
+ sumsLength = numSums * checksum.getChecksumSize();
+
+ byte data[] = new byte[dataLength +
+ DATA_OFFSET_IN_BUFFER +
+ DATA_TRAILER_IN_BUFFER];
+ new Random().nextBytes(data);
+ dataBuf = ByteBuffer.wrap(
data, DATA_OFFSET_IN_BUFFER, dataLength);
- byte checksums[] = new byte[SUMS_OFFSET_IN_BUFFER + sumsLength];
- ByteBuffer checksumBuf = ByteBuffer.wrap(
+ byte checksums[] = new byte[SUMS_OFFSET_IN_BUFFER + sumsLength];
+ checksumBuf = ByteBuffer.wrap(
checksums, SUMS_OFFSET_IN_BUFFER, sumsLength);
-
- // Swap out for direct buffers if requested.
- if (useDirect) {
- dataBuf = directify(dataBuf);
- checksumBuf = directify(checksumBuf);
+
+ // Swap out for direct buffers if requested.
+ if (useDirect) {
+ dataBuf = directify(dataBuf);
+ checksumBuf = directify(checksumBuf);
+ }
}
-
- // calculate real checksum, make sure it passes
- checksum.calculateChunkedSums(dataBuf, checksumBuf);
- checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
-
- // Change a byte in the header and in the trailer, make sure
- // it doesn't affect checksum result
- corruptBufferOffset(checksumBuf, 0);
- checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
- corruptBufferOffset(dataBuf, 0);
- dataBuf.limit(dataBuf.limit() + 1);
- corruptBufferOffset(dataBuf, dataLength + DATA_OFFSET_IN_BUFFER);
- dataBuf.limit(dataBuf.limit() - 1);
- checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
-
- // Make sure bad checksums fail - error at beginning of array
- corruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER);
- try {
+
+ void testCorrectness() throws ChecksumException {
+ // calculate real checksum, make sure it passes
+ checksum.calculateChunkedSums(dataBuf, checksumBuf);
checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
- fail("Did not throw on bad checksums");
- } catch (ChecksumException ce) {
- assertEquals(0, ce.getPos());
- }
- // Make sure bad checksums fail - error at end of array
- uncorruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER);
- corruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER + sumsLength - 1);
- try {
+ // Change a byte in the header and in the trailer, make sure
+ // it doesn't affect checksum result
+ corruptBufferOffset(checksumBuf, 0);
+ checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
+ corruptBufferOffset(dataBuf, 0);
+ dataBuf.limit(dataBuf.limit() + 1);
+ corruptBufferOffset(dataBuf, dataLength + DATA_OFFSET_IN_BUFFER);
+ dataBuf.limit(dataBuf.limit() - 1);
checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
- fail("Did not throw on bad checksums");
- } catch (ChecksumException ce) {
- int expectedPos = checksum.getBytesPerChecksum() * (numSums - 1);
- assertEquals(expectedPos, ce.getPos());
- assertTrue(ce.getMessage().contains("fake file"));
+
+ // Make sure bad checksums fail - error at beginning of array
+ corruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER);
+ try {
+ checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
+ fail("Did not throw on bad checksums");
+ } catch (ChecksumException ce) {
+ assertEquals(0, ce.getPos());
+ }
+
+ // Make sure bad checksums fail - error at end of array
+ uncorruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER);
+ corruptBufferOffset(checksumBuf, SUMS_OFFSET_IN_BUFFER + sumsLength - 1);
+ try {
+ checksum.verifyChunkedSums(dataBuf, checksumBuf, "fake file", 0);
+ fail("Did not throw on bad checksums");
+ } catch (ChecksumException ce) {
+ int expectedPos = checksum.getBytesPerChecksum() * (numSums - 1);
+ assertEquals(expectedPos, ce.getPos());
+ assertTrue(ce.getMessage().contains("fake file"));
+ }
}
}
-
+
+ private void doBulkTest(DataChecksum checksum, int dataLength,
+ boolean useDirect) throws Exception {
+ System.err.println("Testing bulk checksums of length " +
+ dataLength + " with " +
+ (useDirect ? "direct" : "array-backed") + " buffers");
+
+ new Harness(checksum, dataLength, useDirect).testCorrectness();
+ }
+
+ /**
+ * Simple performance test for the "common case" checksum usage in HDFS:
+ * computing and verifying CRC32C with 512 byte chunking on native
+ * buffers.
+ */
+ @Test
+ public void commonUsagePerfTest() throws Exception {
+ final int NUM_RUNS = 5;
+ final DataChecksum checksum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);
+ final int dataLength = 512 * 1024 * 1024;
+ Harness h = new Harness(checksum, dataLength, true);
+
+ for (int i = 0; i < NUM_RUNS; i++) {
+ Stopwatch s = new Stopwatch().start();
+ // calculate real checksum, make sure it passes
+ checksum.calculateChunkedSums(h.dataBuf, h.checksumBuf);
+ s.stop();
+ System.err.println("Calculate run #" + i + ": " +
+ s.elapsedTime(TimeUnit.MICROSECONDS) + "us");
+
+ s = new Stopwatch().start();
+ // verify the checksum, make sure it passes
+ checksum.verifyChunkedSums(h.dataBuf, h.checksumBuf, "fake file", 0);
+ s.stop();
+ System.err.println("Verify run #" + i + ": " +
+ s.elapsedTime(TimeUnit.MICROSECONDS) + "us");
+ }
+ }
+
@Test
public void testEquality() {
assertEquals(
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestGenericOptionsParser.java Wed Aug 20 01:34:29 2014
@@ -21,11 +21,14 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.List;
import java.util.Map;
import junit.framework.TestCase;
+import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -34,12 +37,14 @@ import org.apache.hadoop.security.Creden
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
+import org.apache.hadoop.test.GenericTestUtils;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.junit.Assert;
import com.google.common.collect.Maps;
+import static org.junit.Assert.fail;
public class TestGenericOptionsParser extends TestCase {
File testDir;
@@ -93,6 +98,67 @@ public class TestGenericOptionsParser ex
}
/**
+ * Test the case where the libjars, files and archives arguments
+ * contains an empty token, which should create an IllegalArgumentException.
+ */
+ public void testEmptyFilenames() throws Exception {
+ List<Pair<String, String>> argsAndConfNames = new ArrayList<Pair<String, String>>();
+ argsAndConfNames.add(new Pair<String, String>("-libjars", "tmpjars"));
+ argsAndConfNames.add(new Pair<String, String>("-files", "tmpfiles"));
+ argsAndConfNames.add(new Pair<String, String>("-archives", "tmparchives"));
+ for (Pair<String, String> argAndConfName : argsAndConfNames) {
+ String arg = argAndConfName.getFirst();
+ String configName = argAndConfName.getSecond();
+
+ File tmpFileOne = new File(testDir, "tmpfile1");
+ Path tmpPathOne = new Path(tmpFileOne.toString());
+ File tmpFileTwo = new File(testDir, "tmpfile2");
+ Path tmpPathTwo = new Path(tmpFileTwo.toString());
+ localFs.create(tmpPathOne);
+ localFs.create(tmpPathTwo);
+ String[] args = new String[2];
+ args[0] = arg;
+ // create an empty path in between two valid files,
+ // which prior to HADOOP-10820 used to result in the
+ // working directory being added to "tmpjars" (or equivalent)
+ args[1] = String.format("%s,,%s",
+ tmpFileOne.toURI().toString(), tmpFileTwo.toURI().toString());
+ try {
+ new GenericOptionsParser(conf, args);
+ fail("Expected exception for empty filename");
+ } catch (IllegalArgumentException e) {
+ // expect to receive an IllegalArgumentException
+ GenericTestUtils.assertExceptionContains("File name can't be"
+ + " empty string", e);
+ }
+
+ // test zero file list length - it should create an exception
+ args[1] = ",,";
+ try {
+ new GenericOptionsParser(conf, args);
+ fail("Expected exception for zero file list length");
+ } catch (IllegalArgumentException e) {
+ // expect to receive an IllegalArgumentException
+ GenericTestUtils.assertExceptionContains("File name can't be"
+ + " empty string", e);
+ }
+
+ // test filename with space character;
+ // the URI parser should throw due to a URI syntax error,
+ // which surfaces as an IllegalArgumentException
+ args[1] = String.format("%s, ,%s",
+ tmpFileOne.toURI().toString(), tmpFileTwo.toURI().toString());
+ try {
+ new GenericOptionsParser(conf, args);
+ fail("Expected exception for filename with space character");
+ } catch (IllegalArgumentException e) {
+ // expect to receive an IllegalArgumentException
+ GenericTestUtils.assertExceptionContains("URISyntaxException", e);
+ }
+ }
+ }
+
+ /**
* Test that options passed to the constructor are used.
*/
@SuppressWarnings("static-access")
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml Wed Aug 20 01:34:29 2014
@@ -238,7 +238,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-count \[-q\] <path> \.\.\. :\s*</expected-output>
+ <expected-output>^-count \[-q\] \[-h\] <path> \.\.\. :( )*</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
@@ -260,6 +260,10 @@
<type>RegexpComparator</type>
<expected-output>^( |\t)*DIR_COUNT FILE_COUNT CONTENT_SIZE FILE_NAME( )*</expected-output>
</comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^( |\t)*The -h option shows file sizes in human readable format.( )*</expected-output>
+ </comparator>
</comparators>
</test>
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java Wed Aug 20 01:34:29 2014
@@ -20,14 +20,15 @@ package org.apache.hadoop.crypto.key.kms
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.crypto.key.KeyProvider;
+import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.crypto.key.kms.KMSRESTConstants;
import org.apache.hadoop.security.AccessControlException;
-import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.security.token.delegation.web.HttpUserGroupInformation;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
@@ -38,16 +39,13 @@ import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
-import javax.ws.rs.core.SecurityContext;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
-import java.security.Principal;
-import java.text.MessageFormat;
+import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@@ -59,41 +57,44 @@ import java.util.Map;
@Path(KMSRESTConstants.SERVICE_VERSION)
@InterfaceAudience.Private
public class KMS {
- private static final String CREATE_KEY = "CREATE_KEY";
- private static final String DELETE_KEY = "DELETE_KEY";
- private static final String ROLL_NEW_VERSION = "ROLL_NEW_VERSION";
- private static final String GET_KEYS = "GET_KEYS";
- private static final String GET_KEYS_METADATA = "GET_KEYS_METADATA";
- private static final String GET_KEY_VERSION = "GET_KEY_VERSION";
- private static final String GET_CURRENT_KEY = "GET_CURRENT_KEY";
- private static final String GET_KEY_VERSIONS = "GET_KEY_VERSIONS";
- private static final String GET_METADATA = "GET_METADATA";
- private static final String GENERATE_EEK = "GENERATE_EEK";
- private static final String DECRYPT_EEK = "DECRYPT_EEK";
+
+ public static enum KMSOp {
+ CREATE_KEY, DELETE_KEY, ROLL_NEW_VERSION,
+ GET_KEYS, GET_KEYS_METADATA,
+ GET_KEY_VERSIONS, GET_METADATA, GET_KEY_VERSION, GET_CURRENT_KEY,
+ GENERATE_EEK, DECRYPT_EEK
+ }
private KeyProviderCryptoExtension provider;
+ private KMSAudit kmsAudit;
public KMS() throws Exception {
provider = KMSWebApp.getKeyProvider();
+ kmsAudit= KMSWebApp.getKMSAudit();
}
- private static Principal getPrincipal(SecurityContext securityContext)
- throws AuthenticationException{
- Principal user = securityContext.getUserPrincipal();
- if (user == null) {
- throw new AuthenticationException("User must be authenticated");
- }
- return user;
+
+ private static final String UNAUTHORIZED_MSG_WITH_KEY =
+ "User:%s not allowed to do '%s' on '%s'";
+
+ private static final String UNAUTHORIZED_MSG_WITHOUT_KEY =
+ "User:%s not allowed to do '%s'";
+
+ private void assertAccess(KMSACLs.Type aclType, UserGroupInformation ugi,
+ KMSOp operation) throws AccessControlException {
+ assertAccess(aclType, ugi, operation, null);
}
- private static void assertAccess(KMSACLs.Type aclType, Principal principal,
- String operation, String key) throws AccessControlException {
- if (!KMSWebApp.getACLs().hasAccess(aclType, principal.getName())) {
+ private void assertAccess(KMSACLs.Type aclType,
+ UserGroupInformation ugi, KMSOp operation, String key)
+ throws AccessControlException {
+ if (!KMSWebApp.getACLs().hasAccess(aclType, ugi)) {
KMSWebApp.getUnauthorizedCallsMeter().mark();
- KMSAudit.unauthorized(principal, operation, key);
- throw new AuthorizationException(MessageFormat.format(
- "User:{0} not allowed to do ''{1}'' on ''{2}''",
- principal.getName(), operation, key));
+ kmsAudit.unauthorized(ugi, operation, key);
+ throw new AuthorizationException(String.format(
+ (key != null) ? UNAUTHORIZED_MSG_WITH_KEY
+ : UNAUTHORIZED_MSG_WITHOUT_KEY,
+ ugi.getShortUserName(), operation, key));
}
}
@@ -113,15 +114,14 @@ public class KMS {
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@SuppressWarnings("unchecked")
- public Response createKey(@Context SecurityContext securityContext,
- Map jsonKey) throws Exception {
+ public Response createKey(Map jsonKey) throws Exception {
KMSWebApp.getAdminCallsMeter().mark();
- Principal user = getPrincipal(securityContext);
- String name = (String) jsonKey.get(KMSRESTConstants.NAME_FIELD);
+ UserGroupInformation user = HttpUserGroupInformation.get();
+ final String name = (String) jsonKey.get(KMSRESTConstants.NAME_FIELD);
KMSClientProvider.checkNotEmpty(name, KMSRESTConstants.NAME_FIELD);
- assertAccess(KMSACLs.Type.CREATE, user, CREATE_KEY, name);
+ assertAccess(KMSACLs.Type.CREATE, user, KMSOp.CREATE_KEY, name);
String cipher = (String) jsonKey.get(KMSRESTConstants.CIPHER_FIELD);
- String material = (String) jsonKey.get(KMSRESTConstants.MATERIAL_FIELD);
+ final String material = (String) jsonKey.get(KMSRESTConstants.MATERIAL_FIELD);
int length = (jsonKey.containsKey(KMSRESTConstants.LENGTH_FIELD))
? (Integer) jsonKey.get(KMSRESTConstants.LENGTH_FIELD) : 0;
String description = (String)
@@ -130,9 +130,9 @@ public class KMS {
jsonKey.get(KMSRESTConstants.ATTRIBUTES_FIELD);
if (material != null) {
assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user,
- CREATE_KEY + " with user provided material", name);
+ KMSOp.CREATE_KEY, name);
}
- KeyProvider.Options options = new KeyProvider.Options(
+ final KeyProvider.Options options = new KeyProvider.Options(
KMSWebApp.getConfiguration());
if (cipher != null) {
options.setCipher(cipher);
@@ -143,16 +143,23 @@ public class KMS {
options.setDescription(description);
options.setAttributes(attributes);
- KeyProvider.KeyVersion keyVersion = (material != null)
- ? provider.createKey(name, Base64.decodeBase64(material), options)
- : provider.createKey(name, options);
-
- provider.flush();
+ KeyProvider.KeyVersion keyVersion = user.doAs(
+ new PrivilegedExceptionAction<KeyVersion>() {
+ @Override
+ public KeyVersion run() throws Exception {
+ KeyProvider.KeyVersion keyVersion = (material != null)
+ ? provider.createKey(name, Base64.decodeBase64(material), options)
+ : provider.createKey(name, options);
+ provider.flush();
+ return keyVersion;
+ }
+ }
+ );
- KMSAudit.ok(user, CREATE_KEY, name, "UserProvidedMaterial:" +
+ kmsAudit.ok(user, KMSOp.CREATE_KEY, name, "UserProvidedMaterial:" +
(material != null) + " Description:" + description);
- if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user.getName())) {
+ if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user)) {
keyVersion = removeKeyMaterial(keyVersion);
}
Map json = KMSServerJSONUtils.toJSON(keyVersion);
@@ -166,16 +173,23 @@ public class KMS {
@DELETE
@Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}")
- public Response deleteKey(@Context SecurityContext securityContext,
- @PathParam("name") String name) throws Exception {
+ public Response deleteKey(@PathParam("name") final String name)
+ throws Exception {
KMSWebApp.getAdminCallsMeter().mark();
- Principal user = getPrincipal(securityContext);
- assertAccess(KMSACLs.Type.DELETE, user, DELETE_KEY, name);
+ UserGroupInformation user = HttpUserGroupInformation.get();
+ assertAccess(KMSACLs.Type.DELETE, user, KMSOp.DELETE_KEY, name);
KMSClientProvider.checkNotEmpty(name, "name");
- provider.deleteKey(name);
- provider.flush();
- KMSAudit.ok(user, DELETE_KEY, name, "");
+ user.doAs(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ provider.deleteKey(name);
+ provider.flush();
+ return null;
+ }
+ });
+
+ kmsAudit.ok(user, KMSOp.DELETE_KEY, name, "");
return Response.ok().build();
}
@@ -184,29 +198,36 @@ public class KMS {
@Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
- public Response rolloverKey(@Context SecurityContext securityContext,
- @PathParam("name") String name, Map jsonMaterial)
- throws Exception {
+ public Response rolloverKey(@PathParam("name") final String name,
+ Map jsonMaterial) throws Exception {
KMSWebApp.getAdminCallsMeter().mark();
- Principal user = getPrincipal(securityContext);
- assertAccess(KMSACLs.Type.ROLLOVER, user, ROLL_NEW_VERSION, name);
+ UserGroupInformation user = HttpUserGroupInformation.get();
+ assertAccess(KMSACLs.Type.ROLLOVER, user, KMSOp.ROLL_NEW_VERSION, name);
KMSClientProvider.checkNotEmpty(name, "name");
- String material = (String)
+ final String material = (String)
jsonMaterial.get(KMSRESTConstants.MATERIAL_FIELD);
if (material != null) {
assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user,
- ROLL_NEW_VERSION + " with user provided material", name);
+ KMSOp.ROLL_NEW_VERSION, name);
}
- KeyProvider.KeyVersion keyVersion = (material != null)
- ? provider.rollNewVersion(name, Base64.decodeBase64(material))
- : provider.rollNewVersion(name);
- provider.flush();
+ KeyProvider.KeyVersion keyVersion = user.doAs(
+ new PrivilegedExceptionAction<KeyVersion>() {
+ @Override
+ public KeyVersion run() throws Exception {
+ KeyVersion keyVersion = (material != null)
+ ? provider.rollNewVersion(name, Base64.decodeBase64(material))
+ : provider.rollNewVersion(name);
+ provider.flush();
+ return keyVersion;
+ }
+ }
+ );
- KMSAudit.ok(user, ROLL_NEW_VERSION, name, "UserProvidedMaterial:" +
+ kmsAudit.ok(user, KMSOp.ROLL_NEW_VERSION, name, "UserProvidedMaterial:" +
(material != null) + " NewVersion:" + keyVersion.getVersionName());
- if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user.getName())) {
+ if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user)) {
keyVersion = removeKeyMaterial(keyVersion);
}
Map json = KMSServerJSONUtils.toJSON(keyVersion);
@@ -216,54 +237,78 @@ public class KMS {
@GET
@Path(KMSRESTConstants.KEYS_METADATA_RESOURCE)
@Produces(MediaType.APPLICATION_JSON)
- public Response getKeysMetadata(@Context SecurityContext securityContext,
- @QueryParam(KMSRESTConstants.KEY_OP) List<String> keyNamesList)
- throws Exception {
+ public Response getKeysMetadata(@QueryParam(KMSRESTConstants.KEY)
+ List<String> keyNamesList) throws Exception {
KMSWebApp.getAdminCallsMeter().mark();
- Principal user = getPrincipal(securityContext);
- String[] keyNames = keyNamesList.toArray(new String[keyNamesList.size()]);
- String names = StringUtils.arrayToString(keyNames);
- assertAccess(KMSACLs.Type.GET_METADATA, user, GET_KEYS_METADATA, names);
- KeyProvider.Metadata[] keysMeta = provider.getKeysMetadata(keyNames);
+ UserGroupInformation user = HttpUserGroupInformation.get();
+ final String[] keyNames = keyNamesList.toArray(
+ new String[keyNamesList.size()]);
+ assertAccess(KMSACLs.Type.GET_METADATA, user, KMSOp.GET_KEYS_METADATA);
+
+ KeyProvider.Metadata[] keysMeta = user.doAs(
+ new PrivilegedExceptionAction<KeyProvider.Metadata[]>() {
+ @Override
+ public KeyProvider.Metadata[] run() throws Exception {
+ return provider.getKeysMetadata(keyNames);
+ }
+ }
+ );
+
Object json = KMSServerJSONUtils.toJSON(keyNames, keysMeta);
- KMSAudit.ok(user, GET_KEYS_METADATA, names, "");
+ kmsAudit.ok(user, KMSOp.GET_KEYS_METADATA, "");
return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
}
@GET
@Path(KMSRESTConstants.KEYS_NAMES_RESOURCE)
@Produces(MediaType.APPLICATION_JSON)
- public Response getKeyNames(@Context SecurityContext securityContext)
- throws Exception {
+ public Response getKeyNames() throws Exception {
KMSWebApp.getAdminCallsMeter().mark();
- Principal user = getPrincipal(securityContext);
- assertAccess(KMSACLs.Type.GET_KEYS, user, GET_KEYS, "*");
- Object json = provider.getKeys();
- KMSAudit.ok(user, GET_KEYS, "*", "");
+ UserGroupInformation user = HttpUserGroupInformation.get();
+ assertAccess(KMSACLs.Type.GET_KEYS, user, KMSOp.GET_KEYS);
+
+ List<String> json = user.doAs(
+ new PrivilegedExceptionAction<List<String>>() {
+ @Override
+ public List<String> run() throws Exception {
+ return provider.getKeys();
+ }
+ }
+ );
+
+ kmsAudit.ok(user, KMSOp.GET_KEYS, "");
return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
}
@GET
@Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}")
- public Response getKey(@Context SecurityContext securityContext,
- @PathParam("name") String name)
+ public Response getKey(@PathParam("name") String name)
throws Exception {
- return getMetadata(securityContext, name);
+ return getMetadata(name);
}
@GET
@Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
KMSRESTConstants.METADATA_SUB_RESOURCE)
@Produces(MediaType.APPLICATION_JSON)
- public Response getMetadata(@Context SecurityContext securityContext,
- @PathParam("name") String name)
+ public Response getMetadata(@PathParam("name") final String name)
throws Exception {
- Principal user = getPrincipal(securityContext);
+ UserGroupInformation user = HttpUserGroupInformation.get();
KMSClientProvider.checkNotEmpty(name, "name");
KMSWebApp.getAdminCallsMeter().mark();
- assertAccess(KMSACLs.Type.GET_METADATA, user, GET_METADATA, name);
- Object json = KMSServerJSONUtils.toJSON(name, provider.getMetadata(name));
- KMSAudit.ok(user, GET_METADATA, name, "");
+ assertAccess(KMSACLs.Type.GET_METADATA, user, KMSOp.GET_METADATA, name);
+
+ KeyProvider.Metadata metadata = user.doAs(
+ new PrivilegedExceptionAction<KeyProvider.Metadata>() {
+ @Override
+ public KeyProvider.Metadata run() throws Exception {
+ return provider.getMetadata(name);
+ }
+ }
+ );
+
+ Object json = KMSServerJSONUtils.toJSON(name, metadata);
+ kmsAudit.ok(user, KMSOp.GET_METADATA, name, "");
return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
}
@@ -271,30 +316,50 @@ public class KMS {
@Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
KMSRESTConstants.CURRENT_VERSION_SUB_RESOURCE)
@Produces(MediaType.APPLICATION_JSON)
- public Response getCurrentVersion(@Context SecurityContext securityContext,
- @PathParam("name") String name)
+ public Response getCurrentVersion(@PathParam("name") final String name)
throws Exception {
- Principal user = getPrincipal(securityContext);
+ UserGroupInformation user = HttpUserGroupInformation.get();
KMSClientProvider.checkNotEmpty(name, "name");
KMSWebApp.getKeyCallsMeter().mark();
- assertAccess(KMSACLs.Type.GET, user, GET_CURRENT_KEY, name);
- Object json = KMSServerJSONUtils.toJSON(provider.getCurrentKey(name));
- KMSAudit.ok(user, GET_CURRENT_KEY, name, "");
+ assertAccess(KMSACLs.Type.GET, user, KMSOp.GET_CURRENT_KEY, name);
+
+ KeyVersion keyVersion = user.doAs(
+ new PrivilegedExceptionAction<KeyVersion>() {
+ @Override
+ public KeyVersion run() throws Exception {
+ return provider.getCurrentKey(name);
+ }
+ }
+ );
+
+ Object json = KMSServerJSONUtils.toJSON(keyVersion);
+ kmsAudit.ok(user, KMSOp.GET_CURRENT_KEY, name, "");
return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
}
@GET
@Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}")
@Produces(MediaType.APPLICATION_JSON)
- public Response getKeyVersion(@Context SecurityContext securityContext,
- @PathParam("versionName") String versionName)
- throws Exception {
- Principal user = getPrincipal(securityContext);
+ public Response getKeyVersion(
+ @PathParam("versionName") final String versionName) throws Exception {
+ UserGroupInformation user = HttpUserGroupInformation.get();
KMSClientProvider.checkNotEmpty(versionName, "versionName");
KMSWebApp.getKeyCallsMeter().mark();
- assertAccess(KMSACLs.Type.GET, user, GET_KEY_VERSION, versionName);
- Object json = KMSServerJSONUtils.toJSON(provider.getKeyVersion(versionName));
- KMSAudit.ok(user, GET_KEY_VERSION, versionName, "");
+ assertAccess(KMSACLs.Type.GET, user, KMSOp.GET_KEY_VERSION);
+
+ KeyVersion keyVersion = user.doAs(
+ new PrivilegedExceptionAction<KeyVersion>() {
+ @Override
+ public KeyVersion run() throws Exception {
+ return provider.getKeyVersion(versionName);
+ }
+ }
+ );
+
+ if (keyVersion != null) {
+ kmsAudit.ok(user, KMSOp.GET_KEY_VERSION, keyVersion.getName(), "");
+ }
+ Object json = KMSServerJSONUtils.toJSON(keyVersion);
return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
}
@@ -304,30 +369,39 @@ public class KMS {
KMSRESTConstants.EEK_SUB_RESOURCE)
@Produces(MediaType.APPLICATION_JSON)
public Response generateEncryptedKeys(
- @Context SecurityContext securityContext,
- @PathParam("name") String name,
+ @PathParam("name") final String name,
@QueryParam(KMSRESTConstants.EEK_OP) String edekOp,
@DefaultValue("1")
- @QueryParam(KMSRESTConstants.EEK_NUM_KEYS) int numKeys)
+ @QueryParam(KMSRESTConstants.EEK_NUM_KEYS) final int numKeys)
throws Exception {
- Principal user = getPrincipal(securityContext);
+ UserGroupInformation user = HttpUserGroupInformation.get();
KMSClientProvider.checkNotEmpty(name, "name");
KMSClientProvider.checkNotNull(edekOp, "eekOp");
Object retJSON;
if (edekOp.equals(KMSRESTConstants.EEK_GENERATE)) {
- assertAccess(KMSACLs.Type.GENERATE_EEK, user, GENERATE_EEK, name);
+ assertAccess(KMSACLs.Type.GENERATE_EEK, user, KMSOp.GENERATE_EEK, name);
- List<EncryptedKeyVersion> retEdeks =
+ final List<EncryptedKeyVersion> retEdeks =
new LinkedList<EncryptedKeyVersion>();
try {
- for (int i = 0; i < numKeys; i ++) {
- retEdeks.add(provider.generateEncryptedKey(name));
- }
+
+ user.doAs(
+ new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ for (int i = 0; i < numKeys; i++) {
+ retEdeks.add(provider.generateEncryptedKey(name));
+ }
+ return null;
+ }
+ }
+ );
+
} catch (Exception e) {
throw new IOException(e);
}
- KMSAudit.ok(user, GENERATE_EEK, name, "");
+ kmsAudit.ok(user, KMSOp.GENERATE_EEK, name, "");
retJSON = new ArrayList();
for (EncryptedKeyVersion edek : retEdeks) {
((ArrayList)retJSON).add(KMSServerJSONUtils.toJSON(edek));
@@ -347,33 +421,44 @@ public class KMS {
@Path(KMSRESTConstants.KEY_VERSION_RESOURCE + "/{versionName:.*}/" +
KMSRESTConstants.EEK_SUB_RESOURCE)
@Produces(MediaType.APPLICATION_JSON)
- public Response decryptEncryptedKey(@Context SecurityContext securityContext,
- @PathParam("versionName") String versionName,
+ public Response decryptEncryptedKey(
+ @PathParam("versionName") final String versionName,
@QueryParam(KMSRESTConstants.EEK_OP) String eekOp,
Map jsonPayload)
throws Exception {
- Principal user = getPrincipal(securityContext);
+ UserGroupInformation user = HttpUserGroupInformation.get();
KMSClientProvider.checkNotEmpty(versionName, "versionName");
KMSClientProvider.checkNotNull(eekOp, "eekOp");
- String keyName = (String) jsonPayload.get(KMSRESTConstants.NAME_FIELD);
+ final String keyName = (String) jsonPayload.get(
+ KMSRESTConstants.NAME_FIELD);
String ivStr = (String) jsonPayload.get(KMSRESTConstants.IV_FIELD);
String encMaterialStr =
(String) jsonPayload.get(KMSRESTConstants.MATERIAL_FIELD);
Object retJSON;
if (eekOp.equals(KMSRESTConstants.EEK_DECRYPT)) {
- assertAccess(KMSACLs.Type.DECRYPT_EEK, user, DECRYPT_EEK, versionName);
+ assertAccess(KMSACLs.Type.DECRYPT_EEK, user, KMSOp.DECRYPT_EEK, keyName);
KMSClientProvider.checkNotNull(ivStr, KMSRESTConstants.IV_FIELD);
- byte[] iv = Base64.decodeBase64(ivStr);
+ final byte[] iv = Base64.decodeBase64(ivStr);
KMSClientProvider.checkNotNull(encMaterialStr,
KMSRESTConstants.MATERIAL_FIELD);
- byte[] encMaterial = Base64.decodeBase64(encMaterialStr);
- KeyProvider.KeyVersion retKeyVersion =
- provider.decryptEncryptedKey(
- new KMSClientProvider.KMSEncryptedKeyVersion(keyName, versionName,
- iv, KeyProviderCryptoExtension.EEK, encMaterial));
+ final byte[] encMaterial = Base64.decodeBase64(encMaterialStr);
+
+ KeyProvider.KeyVersion retKeyVersion = user.doAs(
+ new PrivilegedExceptionAction<KeyVersion>() {
+ @Override
+ public KeyVersion run() throws Exception {
+ return provider.decryptEncryptedKey(
+ new KMSClientProvider.KMSEncryptedKeyVersion(keyName,
+ versionName, iv, KeyProviderCryptoExtension.EEK,
+ encMaterial)
+ );
+ }
+ }
+ );
+
retJSON = KMSServerJSONUtils.toJSON(retKeyVersion);
- KMSAudit.ok(user, DECRYPT_EEK, versionName, "");
+ kmsAudit.ok(user, KMSOp.DECRYPT_EEK, keyName, "");
} else {
throw new IllegalArgumentException("Wrong " + KMSRESTConstants.EEK_OP +
" value, it must be " + KMSRESTConstants.EEK_GENERATE + " or " +
@@ -388,15 +473,24 @@ public class KMS {
@Path(KMSRESTConstants.KEY_RESOURCE + "/{name:.*}/" +
KMSRESTConstants.VERSIONS_SUB_RESOURCE)
@Produces(MediaType.APPLICATION_JSON)
- public Response getKeyVersions(@Context SecurityContext securityContext,
- @PathParam("name") String name)
+ public Response getKeyVersions(@PathParam("name") final String name)
throws Exception {
- Principal user = getPrincipal(securityContext);
+ UserGroupInformation user = HttpUserGroupInformation.get();
KMSClientProvider.checkNotEmpty(name, "name");
KMSWebApp.getKeyCallsMeter().mark();
- assertAccess(KMSACLs.Type.GET, user, GET_KEY_VERSIONS, name);
- Object json = KMSServerJSONUtils.toJSON(provider.getKeyVersions(name));
- KMSAudit.ok(user, GET_KEY_VERSIONS, name, "");
+ assertAccess(KMSACLs.Type.GET, user, KMSOp.GET_KEY_VERSIONS, name);
+
+ List<KeyVersion> ret = user.doAs(
+ new PrivilegedExceptionAction<List<KeyVersion>>() {
+ @Override
+ public List<KeyVersion> run() throws Exception {
+ return provider.getKeyVersions(name);
+ }
+ }
+ );
+
+ Object json = KMSServerJSONUtils.toJSON(ret);
+ kmsAudit.ok(user, KMSOp.GET_KEY_VERSIONS, name, "");
return Response.ok().type(MediaType.APPLICATION_JSON).entity(json).build();
}
Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java?rev=1619019&r1=1619018&r2=1619019&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSACLs.java Wed Aug 20 01:34:29 2014
@@ -113,8 +113,7 @@ public class KMSACLs implements Runnable
return conf;
}
- public boolean hasAccess(Type type, String user) {
- UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
+ public boolean hasAccess(Type type, UserGroupInformation ugi) {
return acls.get(type).isUserAllowed(ugi);
}