You are viewing a plain text version of this content. The canonical link was lost in the plain-text conversion; see the common-commits@hadoop.apache.org mailing list archive for the original message.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2014/07/14 23:23:21 UTC
svn commit: r1610533 - in
/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common: ./
src/main/java/ src/main/java/org/apache/hadoop/crypto/key/
src/main/native/src/org/apache/hadoop/io/compress/zlib/
src/main/native/src/org/apache/hadoop/io/nativeio/ src/main/native/src/org/apache/hadoop/net/unix/ src/main/native/src/org/apache/hadoop/security/ src/test/java/org/apache/hadoop/crypto/key/ [subject reconstructed from the Modified file list below; original was truncated in the archive]
Author: szetszwo
Date: Mon Jul 14 21:23:20 2014
New Revision: 1610533
URL: http://svn.apache.org/r1610533
Log:
Merge r1609845 through r1610532 from trunk.
Modified:
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt (contents, props changed)
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/ (props changed)
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c
hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt Mon Jul 14 21:23:20 2014
@@ -180,6 +180,8 @@ Trunk (Unreleased)
HADOOP-10812. Delegate KeyProviderExtension#toString to underlying
KeyProvider. (wang)
+ HADOOP-10736. Add key attributes to the key shell. (Mike Yoder via wang)
+
BUG FIXES
HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -405,6 +407,11 @@ Release 2.6.0 - UNRELEASED
HADOOP-10507. FsShell setfacl can throw ArrayIndexOutOfBoundsException when
no perm is specified. (Stephen Chu and Sathish Gurram via cnauroth)
+ HADOOP-10780. hadoop_user_info_alloc fails on FreeBSD due to incorrect
+ sysconf use (Dmitry Sivachenko via Colin Patrick McCabe)
+
+ HADOOP-10810. Clean up native code compilation warnings. (cnauroth)
+
Release 2.5.0 - UNRELEASED
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1609845-1610532
Propchange: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1609845-1610532
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java Mon Jul 14 21:23:20 2014
@@ -23,9 +23,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
-import java.net.URI;
import java.security.NoSuchAlgorithmException;
-import java.text.MessageFormat;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
@@ -37,7 +35,6 @@ import com.google.gson.stream.JsonWriter
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
import javax.crypto.KeyGenerator;
@@ -137,9 +134,26 @@ public abstract class KeyProvider {
}
public String toString() {
- return MessageFormat.format(
- "cipher: {0}, length: {1} description: {2} created: {3} version: {4}",
- cipher, bitLength, description, created, versions);
+ final StringBuilder metaSB = new StringBuilder();
+ metaSB.append("cipher: ").append(cipher).append(", ");
+ metaSB.append("length: ").append(bitLength).append(", ");
+ metaSB.append("description: ").append(description).append(", ");
+ metaSB.append("created: ").append(created).append(", ");
+ metaSB.append("version: ").append(versions).append(", ");
+ metaSB.append("attributes: ");
+ if ((attributes != null) && !attributes.isEmpty()) {
+ for (Map.Entry<String, String> attribute : attributes.entrySet()) {
+ metaSB.append("[");
+ metaSB.append(attribute.getKey());
+ metaSB.append("=");
+ metaSB.append(attribute.getValue());
+ metaSB.append("], ");
+ }
+ metaSB.deleteCharAt(metaSB.length() - 2); // remove last ', '
+ } else {
+ metaSB.append("null");
+ }
+ return metaSB.toString();
}
public String getDescription() {
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java Mon Jul 14 21:23:20 2014
@@ -22,7 +22,9 @@ import java.io.IOException;
import java.io.PrintStream;
import java.security.InvalidParameterException;
import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
@@ -90,6 +92,7 @@ public class KeyShell extends Configured
*/
private int init(String[] args) throws IOException {
final Options options = KeyProvider.options(getConf());
+ final Map<String, String> attributes = new HashMap<String, String>();
for (int i = 0; i < args.length; i++) { // parse command line
boolean moreTokens = (i < args.length - 1);
@@ -134,6 +137,23 @@ public class KeyShell extends Configured
options.setCipher(args[++i]);
} else if ("--description".equals(args[i]) && moreTokens) {
options.setDescription(args[++i]);
+ } else if ("--attr".equals(args[i]) && moreTokens) {
+ final String attrval[] = args[++i].split("=", 2);
+ final String attr = attrval[0].trim();
+ final String val = attrval[1].trim();
+ if (attr.isEmpty() || val.isEmpty()) {
+ out.println("\nAttributes must be in attribute=value form, " +
+ "or quoted\nlike \"attribute = value\"\n");
+ printKeyShellUsage();
+ return -1;
+ }
+ if (attributes.containsKey(attr)) {
+ out.println("\nEach attribute must correspond to only one value:\n" +
+ "atttribute \"" + attr + "\" was repeated\n" );
+ printKeyShellUsage();
+ return -1;
+ }
+ attributes.put(attr, val);
} else if ("--provider".equals(args[i]) && moreTokens) {
userSuppliedProvider = true;
getConf().set(KeyProviderFactory.KEY_PROVIDER_PATH, args[++i]);
@@ -156,6 +176,10 @@ public class KeyShell extends Configured
return -1;
}
+ if (!attributes.isEmpty()) {
+ options.setAttributes(attributes);
+ }
+
return 0;
}
@@ -404,6 +428,7 @@ public class KeyShell extends Configured
public static final String USAGE =
"create <keyname> [--cipher <cipher>] [--size <size>]\n" +
" [--description <description>]\n" +
+ " [--attr <attribute=value>]\n" +
" [--provider <provider>] [--help]";
public static final String DESC =
"The create subcommand creates a new key for the name specified\n" +
@@ -411,7 +436,9 @@ public class KeyShell extends Configured
"--provider argument. You may specify a cipher with the --cipher\n" +
"argument. The default cipher is currently \"AES/CTR/NoPadding\".\n" +
"The default keysize is 256. You may specify the requested key\n" +
- "length using the --size argument.\n";
+ "length using the --size argument. Arbitrary attribute=value\n" +
+ "style attributes may be specified using the --attr argument.\n" +
+ "--attr may be specified multiple times, once per attribute.\n";
final String keyName;
final Options options;
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Mon Jul 14 21:23:20 2014
@@ -379,6 +379,7 @@ Java_org_apache_hadoop_io_compress_zlib_
return (*env)->NewStringUTF(env, dl_info.dli_fname);
}
}
+ return (*env)->NewStringUTF(env, "Unavailable");
#endif
#ifdef WINDOWS
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Mon Jul 14 21:23:20 2014
@@ -580,6 +580,8 @@ Java_org_apache_hadoop_io_nativeio_Nativ
JNIEnv *env, jclass clazz, jint uid)
{
#ifdef UNIX
+ jstring jstr_username = NULL;
+ char *pw_buf = NULL;
int pw_lock_locked = 0;
if (pw_lock_object != NULL) {
if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
@@ -588,7 +590,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
pw_lock_locked = 1;
}
- char *pw_buf = NULL;
int rc;
size_t pw_buflen = get_pw_buflen();
if ((pw_buf = malloc(pw_buflen)) == NULL) {
@@ -623,7 +624,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
goto cleanup;
}
- jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
+ jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
cleanup:
if (pw_lock_locked) {
@@ -664,7 +665,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
#ifdef WINDOWS
THROW(env, "java/io/IOException",
"The function POSIX.mmap() is not supported on Windows");
- return NULL;
+ return (jlong)(intptr_t)NULL;
#endif
}
@@ -684,7 +685,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
#ifdef WINDOWS
THROW(env, "java/io/IOException",
"The function POSIX.munmap() is not supported on Windows");
- return NULL;
#endif
}
@@ -700,6 +700,8 @@ Java_org_apache_hadoop_io_nativeio_Nativ
JNIEnv *env, jclass clazz, jint gid)
{
#ifdef UNIX
+ jstring jstr_groupname = NULL;
+ char *pw_buf = NULL;
int pw_lock_locked = 0;
if (pw_lock_object != NULL) {
@@ -709,7 +711,6 @@ Java_org_apache_hadoop_io_nativeio_Nativ
pw_lock_locked = 1;
}
- char *pw_buf = NULL;
int rc;
size_t pw_buflen = get_pw_buflen();
if ((pw_buf = malloc(pw_buflen)) == NULL) {
@@ -744,7 +745,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
goto cleanup;
}
- jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
+ jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
PASS_EXCEPTIONS_GOTO(env, cleanup);
cleanup:
@@ -922,7 +923,7 @@ Java_org_apache_hadoop_io_nativeio_Nativ
#ifdef UNIX
THROW(env, "java/io/IOException",
"The function setFilePointer(FileDescriptor) is not supported on Unix");
- return NULL;
+ return (jlong)(intptr_t)NULL;
#endif
#ifdef WINDOWS
@@ -957,7 +958,7 @@ JNIEXPORT jboolean JNICALL Java_org_apac
#ifdef UNIX
THROW(env, "java/io/IOException",
"The function access0(path, access) is not supported on Unix");
- return NULL;
+ return (jlong)(intptr_t)NULL;
#endif
#ifdef WINDOWS
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/net/unix/DomainSocketWatcher.c Mon Jul 14 21:23:20 2014
@@ -120,17 +120,19 @@ Java_org_apache_hadoop_net_unix_DomainSo
JNIEnv *env, jobject obj, jint fd)
{
struct fd_set_data *sd;
- struct pollfd *pollfd, *last_pollfd;
+ struct pollfd *pollfd = NULL, *last_pollfd;
int used_size, i;
sd = (struct fd_set_data*)(intptr_t)(*env)->
GetLongField(env, obj, fd_set_data_fid);
used_size = sd->used_size;
for (i = 0; i < used_size; i++) {
- pollfd = sd->pollfd + i;
- if (pollfd->fd == fd) break;
+ if (sd->pollfd[i].fd == fd) {
+ pollfd = sd->pollfd + i;
+ break;
+ }
}
- if (i == used_size) {
+ if (pollfd == NULL) {
(*env)->Throw(env, newRuntimeException(env, "failed to remove fd %d "
"from the FdSet because it was never present.", fd));
return;
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c Mon Jul 14 21:23:20 2014
@@ -45,7 +45,7 @@ static void throw_ioexception(JNIEnv* en
FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
NULL, *(DWORD*) (&errnum), // reinterpret cast
MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
- (LPSTR*)&buffer, 0, NULL);
+ buffer, 0, NULL);
if (len > 0)
{
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c Mon Jul 14 21:23:20 2014
@@ -36,7 +36,7 @@
struct hadoop_user_info *hadoop_user_info_alloc(void)
{
struct hadoop_user_info *uinfo;
- size_t buf_sz;
+ long buf_sz;
char *buf;
uinfo = calloc(1, sizeof(struct hadoop_user_info));
Modified: hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1610533&r1=1610532&r2=1610533&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Mon Jul 14 21:23:20 2014
@@ -17,35 +17,41 @@
*/
package org.apache.hadoop.crypto.key;
-import static org.junit.Assert.*;
-
import java.io.ByteArrayOutputStream;
import java.io.File;
+import java.io.IOException;
import java.io.PrintStream;
import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
import org.junit.After;
-import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
public class TestKeyShell {
private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
- private static File tmpDir;
-
private PrintStream initialStdOut;
private PrintStream initialStdErr;
+ /* The default JCEKS provider - for testing purposes */
+ private String jceksProvider;
+
@Before
public void setup() throws Exception {
outContent.reset();
errContent.reset();
- tmpDir = new File(System.getProperty("test.build.data", "target"),
+ final File tmpDir = new File(System.getProperty("test.build.data", "target"),
UUID.randomUUID().toString());
- tmpDir.mkdirs();
+ if (!tmpDir.mkdirs()) {
+ throw new IOException("Unable to create " + tmpDir);
+ }
+ jceksProvider = "jceks://file" + tmpDir + "/keystore.jceks";
initialStdOut = System.out;
initialStdErr = System.err;
System.setOut(new PrintStream(outContent));
@@ -58,65 +64,80 @@ public class TestKeyShell {
System.setErr(initialStdErr);
}
+ /**
+ * Delete a key from the default jceksProvider
+ * @param ks The KeyShell instance
+ * @param keyName The key to delete
+ * @throws Exception
+ */
+ private void deleteKey(KeyShell ks, String keyName) throws Exception {
+ int rc;
+ outContent.reset();
+ final String[] delArgs = {"delete", keyName, "--provider", jceksProvider};
+ rc = ks.run(delArgs);
+ assertEquals(0, rc);
+ assertTrue(outContent.toString().contains(keyName + " has been " +
+ "successfully deleted."));
+ }
+
+ /**
+ * Lists the keys in the jceksProvider
+ * @param ks The KeyShell instance
+ * @param wantMetadata True if you want metadata returned with the keys
+ * @return The output from the "list" call
+ * @throws Exception
+ */
+ private String listKeys(KeyShell ks, boolean wantMetadata) throws Exception {
+ int rc;
+ outContent.reset();
+ final String[] listArgs = {"list", "--provider", jceksProvider };
+ final String[] listArgsM = {"list", "--metadata", "--provider", jceksProvider };
+ rc = ks.run(wantMetadata ? listArgsM : listArgs);
+ assertEquals(0, rc);
+ return outContent.toString();
+ }
+
@Test
public void testKeySuccessfulKeyLifecycle() throws Exception {
- outContent.reset();
- String[] args1 = {"create", "key1", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
int rc = 0;
+ String keyName = "key1";
+
KeyShell ks = new KeyShell();
ks.setConf(new Configuration());
- rc = ks.run(args1);
- assertEquals(0, rc);
- assertTrue(outContent.toString().contains("key1 has been successfully " +
- "created."));
outContent.reset();
- String[] args2 = {"list", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
- rc = ks.run(args2);
+ final String[] args1 = {"create", keyName, "--provider", jceksProvider};
+ rc = ks.run(args1);
assertEquals(0, rc);
- assertTrue(outContent.toString().contains("key1"));
+ assertTrue(outContent.toString().contains(keyName + " has been " +
+ "successfully created."));
- outContent.reset();
- String[] args2a = {"list", "--metadata", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
- rc = ks.run(args2a);
- assertEquals(0, rc);
- assertTrue(outContent.toString().contains("key1"));
- assertTrue(outContent.toString().contains("description"));
- assertTrue(outContent.toString().contains("created"));
+ String listOut = listKeys(ks, false);
+ assertTrue(listOut.contains(keyName));
+
+ listOut = listKeys(ks, true);
+ assertTrue(listOut.contains(keyName));
+ assertTrue(listOut.contains("description"));
+ assertTrue(listOut.contains("created"));
outContent.reset();
- String[] args3 = {"roll", "key1", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
- rc = ks.run(args3);
+ final String[] args2 = {"roll", keyName, "--provider", jceksProvider};
+ rc = ks.run(args2);
assertEquals(0, rc);
assertTrue(outContent.toString().contains("key1 has been successfully " +
"rolled."));
- outContent.reset();
- String[] args4 = {"delete", "key1", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
- rc = ks.run(args4);
- assertEquals(0, rc);
- assertTrue(outContent.toString().contains("key1 has been successfully " +
- "deleted."));
+ deleteKey(ks, keyName);
- outContent.reset();
- String[] args5 = {"list", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
- rc = ks.run(args5);
- assertEquals(0, rc);
- assertFalse(outContent.toString(), outContent.toString().contains("key1"));
+ listOut = listKeys(ks, false);
+ assertFalse(listOut, listOut.contains(keyName));
}
/* HADOOP-10586 KeyShell didn't allow -description. */
@Test
public void testKeySuccessfulCreationWithDescription() throws Exception {
outContent.reset();
- String[] args1 = {"create", "key1", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks",
+ final String[] args1 = {"create", "key1", "--provider", jceksProvider,
"--description", "someDescription"};
int rc = 0;
KeyShell ks = new KeyShell();
@@ -126,20 +147,16 @@ public class TestKeyShell {
assertTrue(outContent.toString().contains("key1 has been successfully " +
"created."));
- outContent.reset();
- String[] args2a = {"list", "--metadata", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
- rc = ks.run(args2a);
- assertEquals(0, rc);
- assertTrue(outContent.toString().contains("description"));
- assertTrue(outContent.toString().contains("someDescription"));
+ String listOut = listKeys(ks, true);
+ assertTrue(listOut.contains("description"));
+ assertTrue(listOut.contains("someDescription"));
}
@Test
public void testInvalidKeySize() throws Exception {
- String[] args1 = {"create", "key1", "--size", "56", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
-
+ final String[] args1 = {"create", "key1", "--size", "56", "--provider",
+ jceksProvider};
+
int rc = 0;
KeyShell ks = new KeyShell();
ks.setConf(new Configuration());
@@ -150,9 +167,9 @@ public class TestKeyShell {
@Test
public void testInvalidCipher() throws Exception {
- String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
-
+ final String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider",
+ jceksProvider};
+
int rc = 0;
KeyShell ks = new KeyShell();
ks.setConf(new Configuration());
@@ -163,7 +180,7 @@ public class TestKeyShell {
@Test
public void testInvalidProvider() throws Exception {
- String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
+ final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
"sdff://file/tmp/keystore.jceks"};
int rc = 0;
@@ -177,7 +194,7 @@ public class TestKeyShell {
@Test
public void testTransientProviderWarning() throws Exception {
- String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
+ final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
"user:///"};
int rc = 0;
@@ -191,7 +208,7 @@ public class TestKeyShell {
@Test
public void testTransientProviderOnlyConfig() throws Exception {
- String[] args1 = {"create", "key1"};
+ final String[] args1 = {"create", "key1"};
int rc = 0;
KeyShell ks = new KeyShell();
@@ -206,23 +223,96 @@ public class TestKeyShell {
@Test
public void testFullCipher() throws Exception {
- String[] args1 = {"create", "key1", "--cipher", "AES/CBC/pkcs5Padding",
- "--provider", "jceks://file" + tmpDir + "/keystore.jceks"};
+ final String keyName = "key1";
+ final String[] args1 = {"create", keyName, "--cipher", "AES/CBC/pkcs5Padding",
+ "--provider", jceksProvider};
int rc = 0;
KeyShell ks = new KeyShell();
ks.setConf(new Configuration());
rc = ks.run(args1);
assertEquals(0, rc);
- assertTrue(outContent.toString().contains("key1 has been successfully " +
- "created."));
+ assertTrue(outContent.toString().contains(keyName + " has been " +
+ "successfully " + "created."));
+
+ deleteKey(ks, keyName);
+ }
+
+ @Test
+ public void testAttributes() throws Exception {
+ int rc;
+ KeyShell ks = new KeyShell();
+ ks.setConf(new Configuration());
+
+ /* Simple creation test */
+ final String[] args1 = {"create", "keyattr1", "--provider", jceksProvider,
+ "--attr", "foo=bar"};
+ rc = ks.run(args1);
+ assertEquals(0, rc);
+ assertTrue(outContent.toString().contains("keyattr1 has been " +
+ "successfully " + "created."));
+
+ /* ...and list to see that we have the attr */
+ String listOut = listKeys(ks, true);
+ assertTrue(listOut.contains("keyattr1"));
+ assertTrue(listOut.contains("attributes: [foo=bar]"));
+ /* Negative tests: no attribute */
outContent.reset();
- String[] args2 = {"delete", "key1", "--provider",
- "jceks://file" + tmpDir + "/keystore.jceks"};
+ final String[] args2 = {"create", "keyattr2", "--provider", jceksProvider,
+ "--attr", "=bar"};
+ rc = ks.run(args2);
+ assertEquals(-1, rc);
+
+ /* Not in attribute = value form */
+ outContent.reset();
+ args2[5] = "foo";
+ rc = ks.run(args2);
+ assertEquals(-1, rc);
+
+ /* No attribute or value */
+ outContent.reset();
+ args2[5] = "=";
+ rc = ks.run(args2);
+ assertEquals(-1, rc);
+
+ /* Legal: attribute is a, value is b=c */
+ outContent.reset();
+ args2[5] = "a=b=c";
rc = ks.run(args2);
assertEquals(0, rc);
- assertTrue(outContent.toString().contains("key1 has been successfully " +
- "deleted."));
+
+ listOut = listKeys(ks, true);
+ assertTrue(listOut.contains("keyattr2"));
+ assertTrue(listOut.contains("attributes: [a=b=c]"));
+
+ /* Test several attrs together... */
+ outContent.reset();
+ final String[] args3 = {"create", "keyattr3", "--provider", jceksProvider,
+ "--attr", "foo = bar",
+ "--attr", " glarch =baz ",
+ "--attr", "abc=def"};
+ rc = ks.run(args3);
+ assertEquals(0, rc);
+
+ /* ...and list to ensure they're there. */
+ listOut = listKeys(ks, true);
+ assertTrue(listOut.contains("keyattr3"));
+ assertTrue(listOut.contains("[foo=bar]"));
+ assertTrue(listOut.contains("[glarch=baz]"));
+ assertTrue(listOut.contains("[abc=def]"));
+
+ /* Negative test - repeated attributes should fail */
+ outContent.reset();
+ final String[] args4 = {"create", "keyattr4", "--provider", jceksProvider,
+ "--attr", "foo=bar",
+ "--attr", "foo=glarch"};
+ rc = ks.run(args4);
+ assertEquals(-1, rc);
+
+ /* Clean up to be a good citizen */
+ deleteKey(ks, "keyattr1");
+ deleteKey(ks, "keyattr2");
+ deleteKey(ks, "keyattr3");
}
}