Posted to common-commits@hadoop.apache.org by su...@apache.org on 2014/07/12 04:24:50 UTC

svn commit: r1609878 [3/4] - in /hadoop/common/branches/YARN-1051/hadoop-common-project: hadoop-auth/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server...

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java Sat Jul 12 02:24:40 2014
@@ -62,10 +62,13 @@ public class JvmPauseMonitor {
       "jvm.pause.info-threshold.ms";
   private static final long INFO_THRESHOLD_DEFAULT = 1000;
 
-  
+  private long numGcWarnThresholdExceeded = 0;
+  private long numGcInfoThresholdExceeded = 0;
+  private long totalGcExtraSleepTime = 0;
+   
   private Thread monitorThread;
   private volatile boolean shouldRun = true;
-  
+
   public JvmPauseMonitor(Configuration conf) {
     this.warnThresholdMs = conf.getLong(WARN_THRESHOLD_KEY, WARN_THRESHOLD_DEFAULT);
     this.infoThresholdMs = conf.getLong(INFO_THRESHOLD_KEY, INFO_THRESHOLD_DEFAULT);
@@ -87,6 +90,22 @@ public class JvmPauseMonitor {
       Thread.currentThread().interrupt();
     }
   }
+
+  public boolean isStarted() {
+    return monitorThread != null;
+  }
+  
+  public long getNumGcWarnThresholdExceeded() {
+    return numGcWarnThresholdExceeded;
+  }
+  
+  public long getNumGcInfoThresholdExceeded() {
+    return numGcInfoThresholdExceeded;
+  }
+  
+  public long getTotalGcExtraSleepTime() {
+    return totalGcExtraSleepTime;
+  }
   
   private String formatMessage(long extraSleepTime,
       Map<String, GcTimes> gcTimesAfterSleep,
@@ -166,13 +185,15 @@ public class JvmPauseMonitor {
         Map<String, GcTimes> gcTimesAfterSleep = getGcTimes();
 
         if (extraSleepTime > warnThresholdMs) {
+          ++numGcWarnThresholdExceeded;
           LOG.warn(formatMessage(
               extraSleepTime, gcTimesAfterSleep, gcTimesBeforeSleep));
         } else if (extraSleepTime > infoThresholdMs) {
+          ++numGcInfoThresholdExceeded;
           LOG.info(formatMessage(
               extraSleepTime, gcTimesAfterSleep, gcTimesBeforeSleep));
         }
-        
+        totalGcExtraSleepTime += extraSleepTime;
         gcTimesBeforeSleep = gcTimesAfterSleep;
       }
     }
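
A minimal sketch of how the counters added above might be consumed. This is illustrative only, not part of the commit: it assumes JvmPauseMonitor's existing start() and stop() lifecycle methods, while the three getters are the ones introduced in this patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.JvmPauseMonitor;

    public class PauseMonitorSketch {
      public static void main(String[] args) throws InterruptedException {
        JvmPauseMonitor monitor = new JvmPauseMonitor(new Configuration());
        monitor.start();                 // assumed lifecycle method
        Thread.sleep(5000);              // let the monitor observe a few intervals
        // Cumulative pause statistics exposed by the new getters:
        System.out.println("GC warn-threshold pauses: " + monitor.getNumGcWarnThresholdExceeded());
        System.out.println("GC info-threshold pauses: " + monitor.getNumGcInfoThresholdExceeded());
        System.out.println("Total extra sleep (ms):   " + monitor.getTotalGcExtraSleepTime());
        monitor.stop();                  // assumed lifecycle method
      }
    }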

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32.java Sat Jul 12 02:24:40 2014
@@ -57,38 +57,31 @@ public class PureJavaCrc32 implements Ch
   }
 
   @Override
-  public void update(byte[] b, int off, int len) {
+  public void update(final byte[] b, final int offset, final int len) {
     int localCrc = crc;
 
-    while(len > 7) {
-      final int c0 =(b[off+0] ^ localCrc) & 0xff;
-      final int c1 =(b[off+1] ^ (localCrc >>>= 8)) & 0xff;
-      final int c2 =(b[off+2] ^ (localCrc >>>= 8)) & 0xff;
-      final int c3 =(b[off+3] ^ (localCrc >>>= 8)) & 0xff;
-      localCrc = (T[T8_7_start + c0] ^ T[T8_6_start + c1])
-          ^ (T[T8_5_start + c2] ^ T[T8_4_start + c3]);
+    final int remainder = len & 0x7;
+    int i = offset;
+    for(final int end = offset + len - remainder; i < end; i += 8) {
+      final int x = localCrc
+          ^ ((((b[i  ] << 24) >>> 24) + ((b[i+1] << 24) >>> 16))
+           + (((b[i+2] << 24) >>> 8 ) +  (b[i+3] << 24)));
 
-      final int c4 = b[off+4] & 0xff;
-      final int c5 = b[off+5] & 0xff;
-      final int c6 = b[off+6] & 0xff;
-      final int c7 = b[off+7] & 0xff;
-
-      localCrc ^= (T[T8_3_start + c4] ^ T[T8_2_start + c5])
-           ^ (T[T8_1_start + c6] ^ T[T8_0_start + c7]);
-
-      off += 8;
-      len -= 8;
+      localCrc = ((T[((x << 24) >>> 24) + 0x700] ^ T[((x << 16) >>> 24) + 0x600])
+                ^ (T[((x <<  8) >>> 24) + 0x500] ^ T[ (x        >>> 24) + 0x400]))
+               ^ ((T[((b[i+4] << 24) >>> 24) + 0x300] ^ T[((b[i+5] << 24) >>> 24) + 0x200])
+                ^ (T[((b[i+6] << 24) >>> 24) + 0x100] ^ T[((b[i+7] << 24) >>> 24)]));
     }
 
     /* loop unroll - duff's device style */
-    switch(len) {
-      case 7: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
-      case 6: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
-      case 5: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
-      case 4: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
-      case 3: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
-      case 2: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
-      case 1: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)];
+    switch(remainder) {
+      case 7: localCrc = (localCrc >>> 8) ^ T[((localCrc ^ b[i++]) << 24) >>> 24];
+      case 6: localCrc = (localCrc >>> 8) ^ T[((localCrc ^ b[i++]) << 24) >>> 24];
+      case 5: localCrc = (localCrc >>> 8) ^ T[((localCrc ^ b[i++]) << 24) >>> 24];
+      case 4: localCrc = (localCrc >>> 8) ^ T[((localCrc ^ b[i++]) << 24) >>> 24];
+      case 3: localCrc = (localCrc >>> 8) ^ T[((localCrc ^ b[i++]) << 24) >>> 24];
+      case 2: localCrc = (localCrc >>> 8) ^ T[((localCrc ^ b[i++]) << 24) >>> 24];
+      case 1: localCrc = (localCrc >>> 8) ^ T[((localCrc ^ b[i++]) << 24) >>> 24];
       default:
         /* nothing */
     }
@@ -99,24 +92,15 @@ public class PureJavaCrc32 implements Ch
 
   @Override
   final public void update(int b) {
-    crc = (crc >>> 8) ^ T[T8_0_start + ((crc ^ b) & 0xff)];
+    crc = (crc >>> 8) ^ T[(((crc ^ b) << 24) >>> 24)];
   }
 
   /*
    * CRC-32 lookup tables generated by the polynomial 0xEDB88320.
    * See also TestPureJavaCrc32.Table.
    */
-  private static final int T8_0_start = 0*256;
-  private static final int T8_1_start = 1*256;
-  private static final int T8_2_start = 2*256;
-  private static final int T8_3_start = 3*256;
-  private static final int T8_4_start = 4*256;
-  private static final int T8_5_start = 5*256;
-  private static final int T8_6_start = 6*256;
-  private static final int T8_7_start = 7*256;
-
   private static final int[] T = new int[] {
-  	/* T8_0 */
+    /* T8_0 */
     0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 
     0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3, 
     0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, 

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java Sat Jul 12 02:24:40 2014
@@ -126,6 +126,9 @@ abstract public class Shell {
   public static final boolean LINUX   = (osType == OSType.OS_TYPE_LINUX);
   public static final boolean OTHER   = (osType == OSType.OS_TYPE_OTHER);
 
+  public static final boolean PPC_64
+                = System.getProperties().getProperty("os.arch").contains("ppc64");
+
   /** a Unix command to get the current user's groups list */
   public static String[] getGroupsCommand() {
     return (WINDOWS)? new String[]{"cmd", "/c", "groups"}
@@ -618,7 +621,7 @@ abstract public class Shell {
    * This is an IOException with exit code added.
    */
   public static class ExitCodeException extends IOException {
-    int exitCode;
+    private final int exitCode;
     
     public ExitCodeException(int exitCode, String message) {
       super(message);
@@ -628,6 +631,16 @@ abstract public class Shell {
     public int getExitCode() {
       return exitCode;
     }
+
+    @Override
+    public String toString() {
+      final StringBuilder sb =
+          new StringBuilder("ExitCodeException ");
+      sb.append("exitCode=").append(exitCode)
+        .append(": ");
+      sb.append(super.getMessage());
+      return sb.toString();
+    }
   }
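
With the new toString(), a logged ExitCodeException now carries the exit code even when only the exception itself is printed. A hedged usage sketch (it assumes the existing Shell.execCommand helper; only the toString() and the private final field come from this patch):

    import java.io.IOException;
    import org.apache.hadoop.util.Shell;
    import org.apache.hadoop.util.Shell.ExitCodeException;

    public class ShellSketch {
      public static void main(String[] args) {
        try {
          // execCommand runs the command and throws ExitCodeException on non-zero exit.
          Shell.execCommand("false");    // "false" assumes a Unix-like environment
        } catch (ExitCodeException e) {
          // Prints e.g. "ExitCodeException exitCode=1: <message>"
          System.err.println(e);
          System.err.println("exit code was " + e.getExitCode());
        } catch (IOException e) {
          System.err.println("command could not be run: " + e);
        }
      }
    }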
   
   /**

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Sat Jul 12 02:24:40 2014
@@ -388,10 +388,10 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz,
   jobject buffer, jlong len)
 {
-#ifdef UNIX
   void* buf = (void*)(*env)->GetDirectBufferAddress(env, buffer);
   PASS_EXCEPTIONS(env);
 
+#ifdef UNIX
   if (mlock(buf, len)) {
     CHECK_DIRECT_BUFFER_ADDRESS(buf);
     throw_ioe(env, errno);
@@ -399,37 +399,11 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #endif
 
 #ifdef WINDOWS
-  THROW(env, "java/io/IOException",
-    "The function POSIX.mlock_native() is not supported on Windows");
-#endif
-}
-
-/**
- * public static native void munlock_native(
- *   ByteBuffer buffer, long offset);
- *
- * The "00024" in the function name is an artifact of how JNI encodes
- * special characters. U+0024 is '$'.
- */
-JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_munlock_1native(
-  JNIEnv *env, jclass clazz,
-  jobject buffer, jlong len)
-{
-#ifdef UNIX
-  void* buf = (void*)(*env)->GetDirectBufferAddress(env, buffer);
-  PASS_EXCEPTIONS(env);
-
-  if (munlock(buf, len)) {
+  if (!VirtualLock(buf, len)) {
     CHECK_DIRECT_BUFFER_ADDRESS(buf);
-    throw_ioe(env, errno);
+    throw_ioe(env, GetLastError());
   }
 #endif
-
-#ifdef WINDOWS
-  THROW(env, "java/io/IOException",
-    "The function POSIX.munlock_native() is not supported on Windows");
-#endif
 }
 
 #ifdef __FreeBSD__
@@ -1008,6 +982,40 @@ cleanup:
 #endif
 }
 
+/*
+ * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method:    extendWorkingSetSize
+ * Signature: (J)V
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_extendWorkingSetSize(
+  JNIEnv *env, jclass clazz, jlong delta)
+{
+#ifdef UNIX
+  THROW(env, "java/io/IOException",
+    "The function extendWorkingSetSize(delta) is not supported on Unix");
+#endif
+
+#ifdef WINDOWS
+  SIZE_T min, max;
+  HANDLE hProcess = GetCurrentProcess();
+  if (!GetProcessWorkingSetSize(hProcess, &min, &max)) {
+    throw_ioe(env, GetLastError());
+    return;
+  }
+  if (!SetProcessWorkingSetSizeEx(hProcess, min + delta, max + delta,
+      QUOTA_LIMITS_HARDWS_MIN_DISABLE | QUOTA_LIMITS_HARDWS_MAX_DISABLE)) {
+    throw_ioe(env, GetLastError());
+    return;
+  }
+  // There is no need to call CloseHandle on the pseudo-handle returned from
+  // GetCurrentProcess.
+#endif
+}
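+
The two changes above make POSIX.mlock_native work on Windows via VirtualLock (the separate munlock path is removed, since unmapping a buffer also unlocks it) and add Windows.extendWorkingSetSize so the process working set can grow before pages are locked. A hedged Java-side sketch, assuming the NativeIO method signatures implied by this patch and the test changes further below:

    import java.io.FileInputStream;
    import java.nio.MappedByteBuffer;
    import java.nio.channels.FileChannel;
    import java.nio.channels.FileChannel.MapMode;
    import org.apache.hadoop.io.nativeio.NativeIO;

    public class MlockSketch {
      public static void main(String[] args) throws Exception {
        try (FileChannel ch = new FileInputStream(args[0]).getChannel()) {
          long len = ch.size();
          MappedByteBuffer buf = ch.map(MapMode.READ_ONLY, 0, len);
          if (NativeIO.isAvailable()) {
            // On Windows the lock counts against the working set, so a caller
            // could first grow it via the JNI entry point added above:
            // NativeIO.Windows.extendWorkingSetSize(len);  // Windows only
            NativeIO.POSIX.mlock(buf, len);  // VirtualLock on Windows, mlock(2) on Unix
            // Per this patch, munmap also unlocks the pages; no explicit munlock.
            NativeIO.POSIX.munmap(buf);
          }
        }
      }
    }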
+
 JNIEXPORT void JNICALL 
 Java_org_apache_hadoop_io_nativeio_NativeIO_renameTo0(JNIEnv *env, 
 jclass clazz, jstring jsrc, jstring jdst)

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/hadoop_user_info.c Sat Jul 12 02:24:40 2014
@@ -193,7 +193,7 @@ int hadoop_user_info_getgroups(struct ha
   ngroups = uinfo->gids_size;
   ret = getgrouplist(uinfo->pwd.pw_name, uinfo->pwd.pw_gid, 
                          uinfo->gids, &ngroups);
-  if (ret > 0) {
+  if (ret >= 0) {
     uinfo->num_gids = ngroups;
     ret = put_primary_gid_first(uinfo);
     if (ret) {

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Sat Jul 12 02:24:40 2014
@@ -94,6 +94,98 @@
   </description>
 </property>
 
+<!-- 
+=== Multiple group mapping providers configuration sample === 
+  This sample illustrates a typical use case for CompositeGroupsMapping, where
+Hadoop authentication uses MIT Kerberos, which trusts an AD realm. In this case, service
+principals such as hdfs, mapred, hbase, hive and oozie can be placed in MIT Kerberos,
+while end users come only from the trusted AD. For the service principals, the
+ShellBasedUnixGroupsMapping provider can be used to query their groups efficiently; for
+end users, the LdapGroupsMapping provider can be used. This avoids having to add group
+entries in AD for service principals when only the LdapGroupsMapping provider is used.
+  If multiple ADs are involved and trusted by MIT Kerberos, the LdapGroupsMapping provider
+can be configured multiple times with different AD-specific settings. This sample also shows
+how to do that. Here are the necessary configurations.
+
+<property>
+  <name>hadoop.security.group.mapping</name>
+  <value>org.apache.hadoop.security.CompositeGroupsMapping</value>
+  <description>
+    Class for user to group mapping (get groups for a given user) for ACL, which 
+    makes use of other multiple providers to provide the service.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.security.group.mapping.providers</name>
+  <value>shell4services,ad4usersX,ad4usersY</value>
+  <description>
+    Comma-separated list of names of other providers that provide the user-to-group mapping.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.security.group.mapping.providers.combined</name>
+  <value>true</value>
+  <description>
+    true or false to indicate whether groups from the providers are combined or not. The default value is true.
+    If true, all the providers will be tried and the groups they return are combined into the
+    final result. Otherwise, the providers are tried one by one in the configured order, and the
+    groups retrieved from the first provider that returns any are used without trying the remaining ones.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.security.group.mapping.provider.shell4services</name>
+  <value>org.apache.hadoop.security.ShellBasedUnixGroupsMapping</value>
+  <description>
+    Class for the group mapping provider named 'shell4services'. The name can then be referenced 
+    by the hadoop.security.group.mapping.providers property.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.security.group.mapping.provider.ad4usersX</name>
+  <value>org.apache.hadoop.security.LdapGroupsMapping</value>
+  <description>
+    Class for the group mapping provider named 'ad4usersX'. The name can then be referenced 
+    by the hadoop.security.group.mapping.providers property.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.security.group.mapping.provider.ad4usersY</name>
+  <value>org.apache.hadoop.security.LdapGroupsMapping</value>
+  <description>
+    Class for the group mapping provider named 'ad4usersY'. The name can then be referenced 
+    by the hadoop.security.group.mapping.providers property.
+  </description>
+</property>
+
+<property>
+<name>hadoop.security.group.mapping.provider.ad4usersX.ldap.url</name>
+<value>ldap://ad-host-for-users-X:389</value>
+  <description>
+    LDAP URL for the provider named 'ad4usersX'. Note this property is derived from 
+    'hadoop.security.group.mapping.ldap.url'.
+  </description>
+</property>
+
+<property>
+<name>hadoop.security.group.mapping.provider.ad4usersY.ldap.url</name>
+<value>ldap://ad-host-for-users-Y:389</value>
+  <description>
+    LDAP URL for the provider named 'ad4usersY'. Note this property is derived from 
+    'hadoop.security.group.mapping.ldap.url'.
+  </description>
+</property>
+
+You also need to configure other properties, such as
+  hadoop.security.group.mapping.ldap.bind.password.file,
+for the LDAP providers in the same way as shown above.
+
+-->
+ 
 <property>
   <name>hadoop.security.groups.cache.secs</name>
   <value>300</value>
@@ -1358,4 +1450,9 @@
     true.
   </description>
 </property>
+<property>
+  <name>fs.har.impl.disable.cache</name>
+  <value>true</value>
+  <description>Don't cache 'har' filesystem instances.</description>
+</property>
 </configuration>

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/CommandsManual.apt.vm Sat Jul 12 02:24:40 2014
@@ -306,9 +306,9 @@ Administration Commands
 
    Runs a cluster balancing utility. An administrator can simply press Ctrl-C
    to stop the rebalancing process. See
-   {{{../hadoop-hdfs/HdfsUserGuide.html#Rebalancer}Rebalancer}} for more details.
+   {{{../hadoop-hdfs/HdfsUserGuide.html#Balancer}Balancer}} for more details.
 
-   Usage: <<<hadoop balancer [-threshold <threshold>]>>>
+   Usage: <<<hadoop balancer [-threshold <threshold>] [-policy <policy>]>>>
 
 *------------------------+-----------------------------------------------------------+
 || COMMAND_OPTION        | Description
@@ -316,6 +316,11 @@ Administration Commands
 | -threshold <threshold> | Percentage of disk capacity. This overwrites the
                          | default threshold.
 *------------------------+-----------------------------------------------------------+
+| -policy <policy>       | <<<datanode>>> (default): Cluster is balanced if each datanode is balanced. \
+                         | <<<blockpool>>>: Cluster is balanced if each block pool in each datanode is balanced.
+*------------------------+-----------------------------------------------------------+
+
+   Note that the <<<blockpool>>> policy is stricter than the <<<datanode>>> policy.
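
For example, an illustrative invocation that balances each block pool to within
5 percent of average utilization, using the options documented above:

    hadoop balancer -threshold 5 -policy blockpool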
 
 * <<<daemonlog>>>
 

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/Metrics.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/Metrics.apt.vm?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/Metrics.apt.vm (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/Metrics.apt.vm Sat Jul 12 02:24:40 2014
@@ -86,6 +86,14 @@ jvm context
 *-------------------------------------+--------------------------------------+
 |<<<LogInfo>>> | Total number of INFO logs
 *-------------------------------------+--------------------------------------+
+|<<<GcNumWarnThresholdExceeded>>> | Number of times that the GC warn
+                                  | threshold is exceeded
+*-------------------------------------+--------------------------------------+
+|<<<GcNumInfoThresholdExceeded>>> | Number of times that the GC info
+                                  | threshold is exceeded
+*-------------------------------------+--------------------------------------+
+|<<<GcTotalExtraSleepTime>>> | Total GC extra sleep time in msec
+*-------------------------------------+--------------------------------------+
 
 rpc context
 

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/SecureMode.apt.vm Sat Jul 12 02:24:40 2014
@@ -236,6 +236,25 @@ KVNO Timestamp         Principal
   </property>
 ----
 
+  The <<<hadoop.proxyuser.${superuser}.hosts>>> property accepts a list of IP addresses,
+  IP address ranges in CIDR format, and/or host names.
+  
+  For example, with the configuration below in core-site.xml, the user named
+  <<<oozie>>>, accessing from any host in the 10.222.0.0/16 range or from the host
+  10.113.221.221, can impersonate any user belonging to any group.
+  
+  ----
+  <property>
+    <name>hadoop.proxyuser.oozie.hosts</name>
+    <value>10.222.0.0/16,10.113.221.221</value>
+  </property>
+  <property>
+    <name>hadoop.proxyuser.oozie.groups</name>
+    <value>*</value>
+  </property>
+----
+
 ** Secure DataNode
 
   Because the data transfer protocol of DataNode

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/site/apt/ServiceLevelAuth.apt.vm Sat Jul 12 02:24:40 2014
@@ -100,11 +100,15 @@ security.ha.service.protocol.acl      | 
    Example: <<<user1,user2 group1,group2>>>.
 
    Add a blank at the beginning of the line if only a list of groups is to
-   be provided, equivalently a comman-separated list of users followed by
+   be provided; equivalently, a comma-separated list of users followed by
    a space or nothing implies only a set of given users.
 
    A special value of <<<*>>> implies that all users are allowed to access the
-   service.
+   service. 
+   
+   If an access control list is not defined for a service, the value of
+   <<<security.service.authorization.default.acl>>> is applied. If
+   <<<security.service.authorization.default.acl>>> is not defined, <<<*>>> is applied.
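
For illustration, a hadoop-policy.xml fragment that restricts the default ACL to one
admin group (note the leading blank in the value, meaning groups only) while leaving one
service open. The group name 'hadoopadmins' is hypothetical; the service property name is
taken from the excerpt above:

    <property>
      <name>security.service.authorization.default.acl</name>
      <value> hadoopadmins</value>
    </property>
    <property>
      <name>security.ha.service.protocol.acl</name>
      <value>*</value>
    </property>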
 
 ** Refreshing Service Level Authorization Configuration
 

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java Sat Jul 12 02:24:40 2014
@@ -21,6 +21,7 @@ import org.junit.Assert;
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.ProviderUtils;
 import org.junit.Test;
 
 import java.io.IOException;
@@ -29,7 +30,9 @@ import java.security.NoSuchAlgorithmExce
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
@@ -61,7 +64,7 @@ public class TestKeyProvider {
   @Test
   public void testKeyMaterial() throws Exception {
     byte[] key1 = new byte[]{1,2,3,4};
-    KeyProvider.KeyVersion obj = new KeyProvider.KeyVersion("key1@1", key1);
+    KeyProvider.KeyVersion obj = new KeyProvider.KeyVersion("key1", "key1@1", key1);
     assertEquals("key1@1", obj.getVersionName());
     assertArrayEquals(new byte[]{1,2,3,4}, obj.getMaterial());
   }
@@ -72,7 +75,7 @@ public class TestKeyProvider {
     DateFormat format = new SimpleDateFormat("y/m/d");
     Date date = format.parse("2013/12/25");
     KeyProvider.Metadata meta = new KeyProvider.Metadata("myCipher", 100, null,
-        date, 123);
+        null, date, 123);
     assertEquals("myCipher", meta.getCipher());
     assertEquals(100, meta.getBitLength());
     assertNull(meta.getDescription());
@@ -82,6 +85,7 @@ public class TestKeyProvider {
     assertEquals(meta.getCipher(), second.getCipher());
     assertEquals(meta.getBitLength(), second.getBitLength());
     assertNull(second.getDescription());
+    assertTrue(second.getAttributes().isEmpty());
     assertEquals(meta.getCreated(), second.getCreated());
     assertEquals(meta.getVersions(), second.getVersions());
     int newVersion = second.addVersion();
@@ -92,17 +96,21 @@ public class TestKeyProvider {
     //Metadata with description
     format = new SimpleDateFormat("y/m/d");
     date = format.parse("2013/12/25");
+    Map<String, String> attributes = new HashMap<String, String>();
+    attributes.put("a", "A");
     meta = new KeyProvider.Metadata("myCipher", 100,
-        "description", date, 123);
+        "description", attributes, date, 123);
     assertEquals("myCipher", meta.getCipher());
     assertEquals(100, meta.getBitLength());
     assertEquals("description", meta.getDescription());
+    assertEquals(attributes, meta.getAttributes());
     assertEquals(date, meta.getCreated());
     assertEquals(123, meta.getVersions());
     second = new KeyProvider.Metadata(meta.serialize());
     assertEquals(meta.getCipher(), second.getCipher());
     assertEquals(meta.getBitLength(), second.getBitLength());
     assertEquals(meta.getDescription(), second.getDescription());
+    assertEquals(meta.getAttributes(), second.getAttributes());
     assertEquals(meta.getCreated(), second.getCreated());
     assertEquals(meta.getVersions(), second.getVersions());
     newVersion = second.addVersion();
@@ -116,15 +124,19 @@ public class TestKeyProvider {
     Configuration conf = new Configuration();
     conf.set(KeyProvider.DEFAULT_CIPHER_NAME, "myCipher");
     conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 512);
+    Map<String, String> attributes = new HashMap<String, String>();
+    attributes.put("a", "A");
     KeyProvider.Options options = KeyProvider.options(conf);
     assertEquals("myCipher", options.getCipher());
     assertEquals(512, options.getBitLength());
     options.setCipher("yourCipher");
     options.setDescription("description");
+    options.setAttributes(attributes);
     options.setBitLength(128);
     assertEquals("yourCipher", options.getCipher());
     assertEquals(128, options.getBitLength());
     assertEquals("description", options.getDescription());
+    assertEquals(attributes, options.getAttributes());
     options = KeyProvider.options(new Configuration());
     assertEquals(KeyProvider.DEFAULT_CIPHER, options.getCipher());
     assertEquals(KeyProvider.DEFAULT_BITLENGTH, options.getBitLength());
@@ -133,13 +145,13 @@ public class TestKeyProvider {
   @Test
   public void testUnnestUri() throws Exception {
     assertEquals(new Path("hdfs://nn.example.com/my/path"),
-        KeyProvider.unnestUri(new URI("myscheme://hdfs@nn.example.com/my/path")));
+        ProviderUtils.unnestUri(new URI("myscheme://hdfs@nn.example.com/my/path")));
     assertEquals(new Path("hdfs://nn/my/path?foo=bar&baz=bat#yyy"),
-        KeyProvider.unnestUri(new URI("myscheme://hdfs@nn/my/path?foo=bar&baz=bat#yyy")));
+        ProviderUtils.unnestUri(new URI("myscheme://hdfs@nn/my/path?foo=bar&baz=bat#yyy")));
     assertEquals(new Path("inner://hdfs@nn1.example.com/my/path"),
-        KeyProvider.unnestUri(new URI("outer://inner@hdfs@nn1.example.com/my/path")));
+        ProviderUtils.unnestUri(new URI("outer://inner@hdfs@nn1.example.com/my/path")));
     assertEquals(new Path("user:///"),
-        KeyProvider.unnestUri(new URI("outer://user/")));
+        ProviderUtils.unnestUri(new URI("outer://user/")));
   }
 
   private static class MyKeyProvider extends KeyProvider {
@@ -166,7 +178,7 @@ public class TestKeyProvider {
 
     @Override
     public Metadata getMetadata(String name) throws IOException {
-      return new Metadata(CIPHER, 128, "description", new Date(), 0);
+      return new Metadata(CIPHER, 128, "description", null, new Date(), 0);
     }
 
     @Override

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java Sat Jul 12 02:24:40 2014
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.ProviderUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Assert;
 import org.junit.Before;
@@ -213,7 +214,7 @@ public class TestKeyProviderFactory {
     file.delete();
     conf.set(KeyProviderFactory.KEY_PROVIDER_PATH, ourUrl);
     checkSpecificProvider(conf, ourUrl);
-    Path path = KeyProvider.unnestUri(new URI(ourUrl));
+    Path path = ProviderUtils.unnestUri(new URI(ourUrl));
     FileSystem fs = path.getFileSystem(conf);
     FileStatus s = fs.getFileStatus(path);
     assertTrue(s.getPermission().toString().equals("rwx------"));

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Sat Jul 12 02:24:40 2014
@@ -17,35 +17,41 @@
  */
 package org.apache.hadoop.crypto.key;
 
-import static org.junit.Assert.*;
-
 import java.io.ByteArrayOutputStream;
 import java.io.File;
+import java.io.IOException;
 import java.io.PrintStream;
 import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 public class TestKeyShell {
   private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
   private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
 
-  private static File tmpDir;
-
   private PrintStream initialStdOut;
   private PrintStream initialStdErr;
 
+  /* The default JCEKS provider - for testing purposes */
+  private String jceksProvider;
+
   @Before
   public void setup() throws Exception {
     outContent.reset();
     errContent.reset();
-    tmpDir = new File(System.getProperty("test.build.data", "target"),
+    final File tmpDir = new File(System.getProperty("test.build.data", "target"),
         UUID.randomUUID().toString());
-    tmpDir.mkdirs();
+    if (!tmpDir.mkdirs()) {
+      throw new IOException("Unable to create " + tmpDir);
+    }
+    jceksProvider = "jceks://file" + tmpDir + "/keystore.jceks";
     initialStdOut = System.out;
     initialStdErr = System.err;
     System.setOut(new PrintStream(outContent));
@@ -58,65 +64,80 @@ public class TestKeyShell {
     System.setErr(initialStdErr);
   }
 
+  /**
+   * Delete a key from the default jceksProvider
+   * @param ks The KeyShell instance
+   * @param keyName The key to delete
+   * @throws Exception
+   */
+  private void deleteKey(KeyShell ks, String keyName) throws Exception {
+    int rc;
+    outContent.reset();
+    final String[] delArgs = {"delete", keyName, "--provider", jceksProvider};
+    rc = ks.run(delArgs);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully deleted."));
+  }
+
+  /**
+   * Lists the keys in the jceksProvider
+   * @param ks The KeyShell instance
+   * @param wantMetadata True if you want metadata returned with the keys
+   * @return The output from the "list" call
+   * @throws Exception
+   */
+  private String listKeys(KeyShell ks, boolean wantMetadata) throws Exception {
+    int rc;
+    outContent.reset();
+    final String[] listArgs = {"list", "--provider", jceksProvider };
+    final String[] listArgsM = {"list", "--metadata", "--provider", jceksProvider };
+    rc = ks.run(wantMetadata ? listArgsM : listArgs);
+    assertEquals(0, rc);
+    return outContent.toString();
+  }
+
   @Test
   public void testKeySuccessfulKeyLifecycle() throws Exception {
-    outContent.reset();
-    String[] args1 = {"create", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
     int rc = 0;
+    String keyName = "key1";
+
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
-    rc = ks.run(args1);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"created."));
 
     outContent.reset();
-    String[] args2 = {"list", "--provider",
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args2);
+    final String[] args1 = {"create", keyName, "--provider", jceksProvider};
+    rc = ks.run(args1);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1"));
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully created."));
 
-    outContent.reset();
-    String[] args2a = {"list", "--metadata", "--provider",
-                      "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args2a);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1"));
-    assertTrue(outContent.toString().contains("description"));
-    assertTrue(outContent.toString().contains("created"));
+    String listOut = listKeys(ks, false);
+    assertTrue(listOut.contains(keyName));
+
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains(keyName));
+    assertTrue(listOut.contains("description"));
+    assertTrue(listOut.contains("created"));
 
     outContent.reset();
-    String[] args3 = {"roll", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args3);
+    final String[] args2 = {"roll", keyName, "--provider", jceksProvider};
+    rc = ks.run(args2);
     assertEquals(0, rc);
     assertTrue(outContent.toString().contains("key1 has been successfully " +
     		"rolled."));
 
-    outContent.reset();
-    String[] args4 = {"delete", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args4);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"deleted."));
+    deleteKey(ks, keyName);
 
-    outContent.reset();
-    String[] args5 = {"list", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args5);
-    assertEquals(0, rc);
-    assertFalse(outContent.toString(), outContent.toString().contains("key1"));
+    listOut = listKeys(ks, false);
+    assertFalse(listOut, listOut.contains(keyName));
   }
   
   /* HADOOP-10586 KeyShell didn't allow -description. */
   @Test
   public void testKeySuccessfulCreationWithDescription() throws Exception {
     outContent.reset();
-    String[] args1 = {"create", "key1", "--provider",
-                      "jceks://file" + tmpDir + "/keystore.jceks",
+    final String[] args1 = {"create", "key1", "--provider", jceksProvider,
                       "--description", "someDescription"};
     int rc = 0;
     KeyShell ks = new KeyShell();
@@ -126,20 +147,16 @@ public class TestKeyShell {
     assertTrue(outContent.toString().contains("key1 has been successfully " +
         "created."));
 
-    outContent.reset();
-    String[] args2a = {"list", "--metadata", "--provider",
-                      "jceks://file" + tmpDir + "/keystore.jceks"};
-    rc = ks.run(args2a);
-    assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("description"));
-    assertTrue(outContent.toString().contains("someDescription"));
+    String listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("description"));
+    assertTrue(listOut.contains("someDescription"));
   }
 
   @Test
   public void testInvalidKeySize() throws Exception {
-    String[] args1 = {"create", "key1", "--size", "56", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    
+    final String[] args1 = {"create", "key1", "--size", "56", "--provider",
+            jceksProvider};
+
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
@@ -150,9 +167,9 @@ public class TestKeyShell {
 
   @Test
   public void testInvalidCipher() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
-    
+    final String[] args1 = {"create", "key1", "--cipher", "LJM", "--provider",
+            jceksProvider};
+
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
@@ -163,7 +180,7 @@ public class TestKeyShell {
 
   @Test
   public void testInvalidProvider() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES", "--provider", 
+    final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
       "sdff://file/tmp/keystore.jceks"};
     
     int rc = 0;
@@ -177,7 +194,7 @@ public class TestKeyShell {
 
   @Test
   public void testTransientProviderWarning() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES", "--provider", 
+    final String[] args1 = {"create", "key1", "--cipher", "AES", "--provider",
       "user:///"};
     
     int rc = 0;
@@ -191,7 +208,7 @@ public class TestKeyShell {
   
   @Test
   public void testTransientProviderOnlyConfig() throws Exception {
-    String[] args1 = {"create", "key1"};
+    final String[] args1 = {"create", "key1"};
     
     int rc = 0;
     KeyShell ks = new KeyShell();
@@ -206,23 +223,96 @@ public class TestKeyShell {
 
   @Test
   public void testFullCipher() throws Exception {
-    String[] args1 = {"create", "key1", "--cipher", "AES/CBC/pkcs5Padding", 
-        "--provider", "jceks://file" + tmpDir + "/keystore.jceks"};
+    final String keyName = "key1";
+    final String[] args1 = {"create", keyName, "--cipher", "AES/CBC/pkcs5Padding",
+        "--provider", jceksProvider};
     
     int rc = 0;
     KeyShell ks = new KeyShell();
     ks.setConf(new Configuration());
     rc = ks.run(args1);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"created."));
+    assertTrue(outContent.toString().contains(keyName + " has been " +
+            "successfully created."));
+
+    deleteKey(ks, keyName);
+  }
+
+  @Test
+  public void testAttributes() throws Exception {
+    int rc;
+    KeyShell ks = new KeyShell();
+    ks.setConf(new Configuration());
+
+    /* Simple creation test */
+    final String[] args1 = {"create", "keyattr1", "--provider", jceksProvider,
+            "--attr", "foo=bar"};
+    rc = ks.run(args1);
+    assertEquals(0, rc);
+    assertTrue(outContent.toString().contains("keyattr1 has been " +
+            "successfully " + "created."));
+
+    /* ...and list to see that we have the attr */
+    String listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr1"));
+    assertTrue(listOut.contains("attributes: [foo=bar]"));
 
+    /* Negative tests: no attribute */
     outContent.reset();
-    String[] args2 = {"delete", "key1", "--provider", 
-        "jceks://file" + tmpDir + "/keystore.jceks"};
+    final String[] args2 = {"create", "keyattr2", "--provider", jceksProvider,
+            "--attr", "=bar"};
+    rc = ks.run(args2);
+    assertEquals(-1, rc);
+
+    /* Not in attribute = value form */
+    outContent.reset();
+    args2[5] = "foo";
+    rc = ks.run(args2);
+    assertEquals(-1, rc);
+
+    /* No attribute or value */
+    outContent.reset();
+    args2[5] = "=";
+    rc = ks.run(args2);
+    assertEquals(-1, rc);
+
+    /* Legal: attribute is a, value is b=c */
+    outContent.reset();
+    args2[5] = "a=b=c";
     rc = ks.run(args2);
     assertEquals(0, rc);
-    assertTrue(outContent.toString().contains("key1 has been successfully " +
-    		"deleted."));
+
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr2"));
+    assertTrue(listOut.contains("attributes: [a=b=c]"));
+
+    /* Test several attrs together... */
+    outContent.reset();
+    final String[] args3 = {"create", "keyattr3", "--provider", jceksProvider,
+            "--attr", "foo = bar",
+            "--attr", " glarch =baz  ",
+            "--attr", "abc=def"};
+    rc = ks.run(args3);
+    assertEquals(0, rc);
+
+    /* ...and list to ensure they're there. */
+    listOut = listKeys(ks, true);
+    assertTrue(listOut.contains("keyattr3"));
+    assertTrue(listOut.contains("[foo=bar]"));
+    assertTrue(listOut.contains("[glarch=baz]"));
+    assertTrue(listOut.contains("[abc=def]"));
+
+    /* Negative test - repeated attributes should fail */
+    outContent.reset();
+    final String[] args4 = {"create", "keyattr4", "--provider", jceksProvider,
+            "--attr", "foo=bar",
+            "--attr", "foo=glarch"};
+    rc = ks.run(args4);
+    assertEquals(-1, rc);
+
+    /* Clean up to be a good citizen */
+    deleteKey(ks, "keyattr1");
+    deleteKey(ks, "keyattr2");
+    deleteKey(ks, "keyattr3");
   }
 }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java Sat Jul 12 02:24:40 2014
@@ -227,7 +227,7 @@ public class TestLocalFileSystem {
     try {
       fileSys.mkdirs(bad_dir);
       fail("Failed to detect existing file in path");
-    } catch (FileAlreadyExistsException e) { 
+    } catch (ParentNotDirectoryException e) {
       // Expected
     }
     

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java Sat Jul 12 02:24:40 2014
@@ -84,6 +84,19 @@ public class TestAclCommands {
   }
 
   @Test
+  public void testSetfaclValidationsWithoutPermissions() throws Exception {
+    List<AclEntry> parsedList = new ArrayList<AclEntry>();
+    try {
+      parsedList = AclEntry.parseAclSpec("user:user1:", true);
+    } catch (IllegalArgumentException e) {
+    }
+    assertTrue(parsedList.size() == 0);
+    assertFalse("setfacl should fail with less arguments",
+        0 == runCommand(new String[] { "-setfacl", "-m", "user:user1:",
+            "/path" }));
+  }
+
+  @Test
   public void testMultipleAclSpecParsing() throws Exception {
     List<AclEntry> parsedList = AclEntry.parseAclSpec(
         "group::rwx,user:user1:rwx,user:user2:rw-,"

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCopyPreserveFlag.java Sat Jul 12 02:24:40 2014
@@ -74,6 +74,8 @@ public class TestCopyPreserveFlag {
     output.close();
     fs.setTimes(FROM, MODIFICATION_TIME, 0);
     fs.setPermission(FROM, PERMISSIONS);
+    fs.setTimes(new Path("d1"), MODIFICATION_TIME, 0);
+    fs.setPermission(new Path("d1"), PERMISSIONS);
   }
 
   @After
@@ -132,4 +134,22 @@ public class TestCopyPreserveFlag {
       run(new Cp(), FROM.toString(), TO.toString());
       assertAttributesChanged();
   }
+
+  @Test(timeout = 10000)
+  public void testDirectoryCpWithP() throws Exception {
+    run(new Cp(), "-p", "d1", "d3");
+    assertEquals(fs.getFileStatus(new Path("d1")).getModificationTime(),
+        fs.getFileStatus(new Path("d3")).getModificationTime());
+    assertEquals(fs.getFileStatus(new Path("d1")).getPermission(),
+        fs.getFileStatus(new Path("d3")).getPermission());
+  }
+
+  @Test(timeout = 10000)
+  public void testDirectoryCpWithoutP() throws Exception {
+    run(new Cp(), "d1", "d4");
+    assertTrue(fs.getFileStatus(new Path("d1")).getModificationTime() !=
+        fs.getFileStatus(new Path("d4")).getModificationTime());
+    assertTrue(!fs.getFileStatus(new Path("d1")).getPermission()
+        .equals(fs.getFileStatus(new Path("d4")).getPermission()));
+  }
 }

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestActiveStandbyElectorRealZK.java Sat Jul 12 02:24:40 2014
@@ -20,7 +20,6 @@ package org.apache.hadoop.ha;
 
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.util.Collections;
 import java.util.UUID;
@@ -30,7 +29,6 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.ha.ActiveStandbyElector.ActiveStandbyElectorCallback;
 import org.apache.hadoop.ha.ActiveStandbyElector.State;
 import org.apache.hadoop.util.ZKUtil.ZKAuthInfo;
-import org.apache.hadoop.util.Shell;
 import org.apache.log4j.Level;
 import org.apache.zookeeper.ZooDefs.Ids;
 import org.apache.zookeeper.server.ZooKeeperServer;
@@ -62,8 +60,6 @@ public class TestActiveStandbyElectorRea
   
   @Override
   public void setUp() throws Exception {
-    // skip tests on Windows until after resolution of ZooKeeper client bug
-    assumeTrue(!Shell.WINDOWS);
     super.setUp();
     
     zkServer = getServer(serverFactory);

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverControllerStress.java Sat Jul 12 02:24:40 2014
@@ -17,14 +17,11 @@
  */
 package org.apache.hadoop.ha;
 
-import static org.junit.Assume.assumeTrue;
-
 import java.util.Random;
 
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 import org.junit.After;
 import org.junit.Before;
@@ -49,8 +46,6 @@ public class TestZKFailoverControllerStr
 
   @Before
   public void setupConfAndServices() throws Exception {
-    // skip tests on Windows until after resolution of ZooKeeper client bug
-    assumeTrue(!Shell.WINDOWS);
     conf = new Configuration();
     conf.set(ZKFailoverController.ZK_QUORUM_KEY, hostPort);
     this.cluster = new MiniZKFCCluster(conf, getServer(serverFactory));

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpCookieFlag.java Sat Jul 12 02:24:40 2014
@@ -36,6 +36,8 @@ import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
 import java.security.GeneralSecurityException;
+import java.net.HttpCookie;
+import java.util.List;
 
 public class TestHttpCookieFlag {
   private static final String BASEDIR = System.getProperty("test.build.dir",
@@ -116,8 +118,12 @@ public class TestHttpCookieFlag {
             .getConnectorAddress(0)));
     HttpURLConnection conn = (HttpURLConnection) new URL(base,
             "/echo").openConnection();
-    Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE + "=token; " +
-            "HttpOnly", conn.getHeaderField("Set-Cookie"));
+
+    String header = conn.getHeaderField("Set-Cookie");
+    List<HttpCookie> cookies = HttpCookie.parse(header);
+    Assert.assertTrue(!cookies.isEmpty());
+    Assert.assertTrue(header.contains("; HttpOnly"));
+    Assert.assertTrue("token".equals(cookies.get(0).getValue()));
   }
 
   @Test
@@ -127,8 +133,13 @@ public class TestHttpCookieFlag {
     HttpsURLConnection conn = (HttpsURLConnection) new URL(base,
             "/echo").openConnection();
     conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
-    Assert.assertEquals(AuthenticatedURL.AUTH_COOKIE + "=token; " +
-            "Secure; HttpOnly", conn.getHeaderField("Set-Cookie"));
+
+    String header = conn.getHeaderField("Set-Cookie");
+    List<HttpCookie> cookies = HttpCookie.parse(header);
+    Assert.assertTrue(!cookies.isEmpty());
+    Assert.assertTrue(header.contains("; HttpOnly"));
+    Assert.assertTrue(cookies.get(0).getSecure());
+    Assert.assertTrue("token".equals(cookies.get(0).getValue()));
   }
 
   @AfterClass

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Sat Jul 12 02:24:40 2014
@@ -49,7 +49,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 
 public class TestNativeIO {
@@ -572,7 +571,6 @@ public class TestNativeIO {
   @Test(timeout=10000)
   public void testMlock() throws Exception {
     assumeTrue(NativeIO.isAvailable());
-    assumeTrue(Shell.LINUX);
     final File TEST_FILE = new File(new File(
         System.getProperty("test.build.data","build/test/data")),
         "testMlockFile");
@@ -607,8 +605,8 @@ public class TestNativeIO {
         sum += mapbuf.get(i);
       }
       assertEquals("Expected sums to be equal", bufSum, sum);
-      // munlock the buffer
-      NativeIO.POSIX.munlock(mapbuf, fileSize);
+      // munmap the buffer, which also implicitly unlocks it
+      NativeIO.POSIX.munmap(mapbuf);
     } finally {
       if (channel != null) {
         channel.close();
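
Note: the locked pages are now released by unmapping the buffer instead of
calling munlock directly; dropping the mapping implicitly drops the mlock
and also frees the address range the test no longer needs. The sequence,
sketched with an mlock signature assumed from the original test:

    MappedByteBuffer mapbuf =
        channel.map(FileChannel.MapMode.READ_ONLY, 0, fileSize);
    NativeIO.POSIX.mlock(mapbuf, fileSize);  // assumed signature
    // ... read through the buffer ...
    NativeIO.POSIX.munmap(mapbuf);           // unmaps and implicitly unlocks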

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java Sat Jul 12 02:24:40 2014
@@ -22,19 +22,22 @@ import static org.junit.Assert.assertEqu
 
 import java.io.IOException;
 import java.io.OutputStreamWriter;
+import java.io.Writer;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import static org.mockito.Mockito.*;
-
 import org.apache.hadoop.metrics2.AbstractMetric;
+import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.sink.GraphiteSink;
 import org.junit.Test;
+
+import static org.mockito.Mockito.*;
 import org.mockito.ArgumentCaptor;
+import org.mockito.internal.util.reflection.Whitebox;
 
 public class TestGraphiteMetrics {
     private AbstractMetric makeMetric(String name, Number value) {
@@ -55,14 +58,13 @@ public class TestGraphiteMetrics {
         metrics.add(makeMetric("foo2", 2.25));
         MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
 
-        OutputStreamWriter writer = mock(OutputStreamWriter.class);
+        OutputStreamWriter mockWriter = mock(OutputStreamWriter.class);
         ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
-
-        sink.setWriter(writer);
+        Whitebox.setInternalState(sink, "writer", mockWriter);
         sink.putMetrics(record);
 
         try {
-            verify(writer).write(argument.capture());
+            verify(mockWriter).write(argument.capture());
         } catch (IOException e) {
             e.printStackTrace();
         }
@@ -87,14 +89,13 @@ public class TestGraphiteMetrics {
         metrics.add(makeMetric("foo2", 2));
         MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
 
-        OutputStreamWriter writer = mock(OutputStreamWriter.class);
+        OutputStreamWriter mockWriter = mock(OutputStreamWriter.class);
         ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
-
-        sink.setWriter(writer);
+        Whitebox.setInternalState(sink, "writer", mockWriter);
         sink.putMetrics(record);
 
         try {
-            verify(writer).write(argument.capture());
+            verify(mockWriter).write(argument.capture());
         } catch (IOException e) {
             e.printStackTrace();
         }
@@ -107,4 +108,39 @@ public class TestGraphiteMetrics {
             result.equals("null.all.Context.Context=all.foo2 2 10\n" + 
             "null.all.Context.Context=all.foo1 1 10\n"));
     }
+    @Test(expected=MetricsException.class)
+    public void testCloseAndWrite() throws IOException {
+      GraphiteSink sink = new GraphiteSink();
+      List<MetricsTag> tags = new ArrayList<MetricsTag>();
+      tags.add(new MetricsTag(MsInfo.Context, "all"));
+      tags.add(new MetricsTag(MsInfo.Hostname, "host"));
+      Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
+      metrics.add(makeMetric("foo1", 1.25));
+      metrics.add(makeMetric("foo2", 2.25));
+      MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
+
+      OutputStreamWriter writer = mock(OutputStreamWriter.class);
+
+      Whitebox.setInternalState(sink, "writer", writer);
+      sink.close();
+      sink.putMetrics(record);
+    }
+
+    @Test
+    public void testClose(){
+        GraphiteSink sink = new GraphiteSink();
+        Writer mockWriter = mock(Writer.class);
+        Whitebox.setInternalState(sink, "writer", mockWriter);
+        try {
+            sink.close();
+        } catch (IOException ioe) {
+            ioe.printStackTrace();
+        }
+
+        try {
+            verify(mockWriter).close();
+        } catch (IOException ioe) {
+            ioe.printStackTrace();
+        }
+    }
 }
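
Note: the mock writer is now injected with Mockito's
org.mockito.internal.util.reflection.Whitebox rather than through a public
setter, presumably so GraphiteSink no longer has to expose a test-only
setWriter(). The pattern in isolation (the field name must match the
private field in GraphiteSink; mock() and verify() statically imported):

    GraphiteSink sink = new GraphiteSink();
    Writer mockWriter = mock(Writer.class);
    Whitebox.setInternalState(sink, "writer", mockWriter);
    // exercise the sink, then verify(mockWriter)...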

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java Sat Jul 12 02:24:40 2014
@@ -360,6 +360,24 @@ public class TestMetricsSystemImpl {
     ms.register(ts);
   }
 
+  @Test public void testStartStopStart() {
+    DefaultMetricsSystem.shutdown(); // Clear pre-existing source names.
+    MetricsSystemImpl ms = new MetricsSystemImpl("test");
+    TestSource ts = new TestSource("ts");
+    ms.start();
+    ms.register("ts", "", ts);
+    MetricsSourceAdapter sa = ms.getSourceAdapter("ts");
+    assertNotNull(sa);
+    assertNotNull(sa.getMBeanName());
+    ms.stop();
+    ms.shutdown();
+    ms.start();
+    sa = ms.getSourceAdapter("ts");
+    assertNotNull(sa);
+    assertNotNull(sa.getMBeanName());
+    ms.stop();
+    ms.shutdown();
+  }
 
   private void checkMetricsRecords(List<MetricsRecord> recs) {
     LOG.debug(recs);
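
Note: testStartStopStart guards against stale MBean state across a metrics
system restart; after stop()/shutdown()/start(), the source adapter must
come back re-registered with a fresh, non-null MBean name. A hedged sketch
of checking the registration directly against JMX (assuming getMBeanName()
returns the javax.management.ObjectName):

    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    assertTrue(mbs.isRegistered(sa.getMBeanName()));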

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/source/TestJvmMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/source/TestJvmMetrics.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/source/TestJvmMetrics.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/source/TestJvmMetrics.java Sat Jul 12 02:24:40 2014
@@ -19,18 +19,25 @@
 package org.apache.hadoop.metrics2.source;
 
 import org.junit.Test;
+
 import static org.mockito.Mockito.*;
 import static org.apache.hadoop.test.MetricsAsserts.*;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.hadoop.util.JvmPauseMonitor;
+
 import static org.apache.hadoop.metrics2.source.JvmMetricsInfo.*;
 import static org.apache.hadoop.metrics2.impl.MsInfo.*;
 
 public class TestJvmMetrics {
 
   @Test public void testPresence() {
-    MetricsRecordBuilder rb = getMetrics(new JvmMetrics("test", "test"));
+    JvmPauseMonitor pauseMonitor = new JvmPauseMonitor(new Configuration());
+    JvmMetrics jvmMetrics = new JvmMetrics("test", "test");
+    jvmMetrics.setPauseMonitor(pauseMonitor);
+    MetricsRecordBuilder rb = getMetrics(jvmMetrics);
     MetricsCollector mc = rb.parent();
 
     verify(mc).addRecord(JvmMetrics);
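
Note: JvmMetrics now sources its GC-pause counters from a JvmPauseMonitor,
so the test wires one in before collecting. In a daemon the monitor would
also be started and stopped around the process lifetime; a minimal sketch:

    JvmPauseMonitor pauseMonitor = new JvmPauseMonitor(new Configuration());
    pauseMonitor.start();                      // begin pause sampling
    JvmMetrics jvmMetrics = new JvmMetrics("test", "test");
    jvmMetrics.setPauseMonitor(pauseMonitor);
    // ... on shutdown:
    pauseMonitor.stop();                       // stop() assumed to exist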

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java Sat Jul 12 02:24:40 2014
@@ -151,7 +151,7 @@ public class TestSocketIOWithTimeout {
       // simulate a partial write scenario.  Attempts were made to switch the
       // test from using a pipe to a network socket and also to use larger and
       // larger buffers in doIO.  Nothing helped the situation though.
-      if (!Shell.WINDOWS) {
+      if (!Shell.WINDOWS && !Shell.PPC_64) {
         try {
           out.write(1);
           fail("Did not throw");
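
Note: the partial-write simulation is now skipped on ppc64 as well, where
pipe buffering behaves differently enough to make the scenario unreliable.
Shell.PPC_64 is presumably an os.arch probe along these lines (an
assumption, not the verbatim definition):

    public static final boolean PPC_64 =
        System.getProperty("os.arch").contains("ppc64");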

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java Sat Jul 12 02:24:40 2014
@@ -17,27 +17,25 @@
  */
 package org.apache.hadoop.security.authorize;
 
-import java.util.Iterator;
-import java.util.Set;
-import java.util.List;
-
-import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
+
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.util.NativeCodeLoader;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.NativeCodeLoader;
+import org.junit.Test;
 
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
@@ -221,8 +219,8 @@ public class TestAccessControlList {
   @Test
   public void testAccessControlList() throws Exception {
     AccessControlList acl;
-    Set<String> users;
-    Set<String> groups;
+    Collection<String> users;
+    Collection<String> groups;
     
     acl = new AccessControlList("drwho tardis");
     users = acl.getUsers();
@@ -273,8 +271,8 @@ public class TestAccessControlList {
   @Test
   public void testAddRemoveAPI() {
     AccessControlList acl;
-    Set<String> users;
-    Set<String> groups;
+    Collection<String> users;
+    Collection<String> groups;
     acl = new AccessControlList(" ");
     assertEquals(0, acl.getUsers().size());
     assertEquals(0, acl.getGroups().size());
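
Note: AccessControlList.getUsers() and getGroups() evidently return
Collection<String> now rather than Set<String>, so callers should code to
the wider interface:

    AccessControlList acl = new AccessControlList("drwho tardis");
    Collection<String> users = acl.getUsers();    // was Set<String>
    Collection<String> groups = acl.getGroups();  // was Set<String>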

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java Sat Jul 12 02:24:40 2014
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertEqu
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.security.SecureRandom;
 import java.util.Arrays;
 import java.util.Collection;
 
@@ -50,6 +51,7 @@ public class TestProxyUsers {
   private static final String[] SUDO_GROUP_NAMES =
     new String[] { "sudo_proxied_user" };
   private static final String PROXY_IP = "1.2.3.4";
+  private static final String PROXY_IP_RANGE = "10.222.0.0/16,10.113.221.221";
 
   /**
    * Test the netgroups (groups in ACL rules that start with @)
@@ -140,7 +142,6 @@ public class TestProxyUsers {
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
 
-
     // First try proxying a group that's allowed
     UserGroupInformation realUserUgi = UserGroupInformation
         .createRemoteUser(REAL_USER_NAME);
@@ -295,6 +296,29 @@ public class TestProxyUsers {
     assertNotAuthorized(proxyUserUgi, "1.2.3.4");
     assertNotAuthorized(proxyUserUgi, "1.2.3.5");
   }
+  
+  @Test
+  public void testIPRange() {
+    Configuration conf = new Configuration();
+    conf.set(
+        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+        "*");
+    conf.set(
+        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+        PROXY_IP_RANGE);
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+
+    // First try proxying a group that's allowed
+    UserGroupInformation realUserUgi = UserGroupInformation
+        .createRemoteUser(REAL_USER_NAME);
+    UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    // From good IP
+    assertAuthorized(proxyUserUgi, "10.222.0.0");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "10.221.0.0");
+  }
 
   @Test
   public void testWithDuplicateProxyGroups() throws Exception {
@@ -362,6 +386,30 @@ public class TestProxyUsers {
      // From bad IP
      assertNotAuthorized(proxyUserUgi, "1.2.3.5");
    }
+  
+  @Test
+  public void testWithProxyGroupsAndUsersWithSpaces() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(
+        DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
+        StringUtils.join(",", Arrays.asList(PROXY_USER_NAME + " ", AUTHORIZED_PROXY_USER_NAME, "ONEMORE")));
+
+    conf.set(
+      DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
+    
+    conf.set(
+      DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      PROXY_IP);
+    
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+    
+    Collection<String> groupsToBeProxied = 
+        ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
+        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME));
+    
+    assertEquals (GROUP_NAMES.length, groupsToBeProxied.size());
+  }
 
 
   private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
@@ -408,4 +456,71 @@ public class TestProxyUsers {
       return null;
     }
   }
+  
+  public static void loadTest(String ipString, int testRange) {
+    Configuration conf = new Configuration();
+    conf.set(
+        DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+        StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
+
+    conf.set(
+        DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
+        ipString
+        );
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+
+
+    // First try proxying a group that's allowed
+    UserGroupInformation realUserUgi = UserGroupInformation
+        .createRemoteUser(REAL_USER_NAME);
+    UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    long startTime = System.nanoTime();
+    SecureRandom sr = new SecureRandom();
+    for (int i=1; i < 1000000; i++){
+      try {
+        ProxyUsers.authorize(proxyUserUgi, "1.2.3." + sr.nextInt(testRange));
+      } catch (AuthorizationException e) {
+      }
+    }
+    long stopTime = System.nanoTime();
+    long elapsedTime = stopTime - startTime;
+    System.out.println(elapsedTime/1000000 + " ms");
+  }
+  
+  /**
+   * Invokes the load test.
+   * A few sample invocations are listed below:
+   * TestProxyUsers ip 128 256
+   * TestProxyUsers range 1.2.3.0/25 256
+   * TestProxyUsers ip 4 8
+   * TestProxyUsers range 1.2.3.0/30 8
+   * @param args
+   */
+  public static void main(String[] args) {
+    String ipValues = null;
+
+    if (args.length != 3 || (!args[0].equals("ip") && !args[0].equals("range"))) {
+      System.out.println("Invalid invocation. Usage: TestProxyUsers <ip|range> <numberOfIps|cidr> <testRange>");
+    }
+    else {
+      if (args[0].equals("ip")){
+        int numberOfIps =  Integer.parseInt(args[1]);
+        StringBuilder sb = new StringBuilder();
+        for (int i=0; i < numberOfIps; i++){
+          sb.append("1.2.3."+ i + ",");
+        }
+        ipValues = sb.toString();
+      }
+      else if (args[0].equals("range")){
+        ipValues = args[1];
+      }
+
+      int testRange = Integer.parseInt(args[2]);
+
+      loadTest(ipValues, testRange);
+    }
+  }
+
 }
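
Note: proxy-user host restrictions now accept CIDR ranges alongside literal
addresses, and the class gains a standalone micro-benchmark (main/loadTest)
for sizing the cost of range matching. Configuring a range looks like this
("realuser" is a placeholder name):

    Configuration conf = new Configuration();
    conf.set(DefaultImpersonationProvider
            .getProxySuperuserIpConfKey("realuser"),
        "10.222.0.0/16,10.113.221.221");
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
    // hosts inside 10.222.0.0/16, plus 10.113.221.221, may now proxy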

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java Sat Jul 12 02:24:40 2014
@@ -20,12 +20,14 @@ package org.apache.hadoop.util;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.PrintStream;
+import java.lang.reflect.Constructor;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 import java.util.Random;
 import java.util.zip.CRC32;
 import java.util.zip.Checksum;
+
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -187,12 +189,18 @@ public class TestPureJavaCrc32 {
       long polynomial = Long.parseLong(args[0], 16);
       
       int i = 8;
-      final PrintStream out = new PrintStream(
-          new FileOutputStream("table" + i + ".txt"), true);
       final Table t = new Table(i, 16, polynomial);
       final String s = t.toString();
       System.out.println(s);
-      out.println(s);
+
+      //print to a file
+      final PrintStream out = new PrintStream(
+          new FileOutputStream("table" + i + ".txt"), true);
+      try {
+        out.println(s);
+      } finally {
+        out.close();
+      }
     }
   }
   
@@ -210,10 +218,15 @@ public class TestPureJavaCrc32 {
     public static final int MAX_LEN = 32*1024*1024; // up to 32MB chunks
     public static final int BYTES_PER_SIZE = MAX_LEN * 4;
 
-    static final Checksum zip = new CRC32(); 
-    static final Checksum[] CRCS = {new PureJavaCrc32()};
+    static final Class<? extends Checksum> zip = CRC32.class; 
+    static final List<Class<? extends Checksum>> CRCS = new ArrayList<Class<? extends Checksum>>();
+    static {
+      CRCS.add(zip);
+      CRCS.add(PureJavaCrc32.class);
+    }
+      
 
-    public static void main(String args[]) {
+    public static void main(String args[]) throws Exception {
       printSystemProperties(System.out);
       doBench(CRCS, System.out);
     }
@@ -223,76 +236,140 @@ public class TestPureJavaCrc32 {
       out.printf(" %" + w + "s |", s);
     }
 
-    private static void doBench(final Checksum[] crcs, final PrintStream out) {
-      final ArrayList<Checksum> a = new ArrayList<Checksum>();
-      a.add(zip);
-      for (Checksum c : crcs)
-        if(c.getClass() != zip.getClass())
-          a.add(c);
-      doBench(a, out);
-    }
-
-    private static void doBench(final List<Checksum> crcs, final PrintStream out
-        ) {
+    private static void doBench(final List<Class<? extends Checksum>> crcs,
+        final PrintStream out) throws Exception {
       final byte[] bytes = new byte[MAX_LEN];
       new Random().nextBytes(bytes);
 
       // Print header
-      out.printf("\nPerformance Table (The unit is MB/sec)\n||");
-      final String title = "Num Bytes";
-      printCell("Num Bytes", 0, out);
-      for (Checksum c : crcs) {
-        out.printf("|");
-        printCell(c.getClass().getSimpleName(), 8, out);
-      }
-      out.printf("|\n");
+      out.printf("\nPerformance Table (The unit is MB/sec; #T = #Threads)\n");
 
       // Warm up implementations to get jit going.
-      for (Checksum c : crcs) {
-        doBench(c, bytes, 2, null);
-        doBench(c, bytes, 2101, null);
+      for (Class<? extends Checksum> c : crcs) {
+        doBench(c, 1, bytes, 2);
+        doBench(c, 1, bytes, 2101);
+      }
+
+      // Test on a variety of sizes with different number of threads
+      for (int size = 32; size <= MAX_LEN; size <<= 1) {
+        doBench(crcs, bytes, size, out);
+      }
+    }
+
+    private static void doBench(final List<Class<? extends Checksum>> crcs,
+        final byte[] bytes, final int size, final PrintStream out) throws Exception {
+      final String numBytesStr = " #Bytes ";
+      final String numThreadsStr = "#T";
+      final String diffStr = "% diff";
+
+      out.print('|');
+      printCell(numBytesStr, 0, out);
+      printCell(numThreadsStr, 0, out);
+      for (int i = 0; i < crcs.size(); i++) {
+        final Class<? extends Checksum> c = crcs.get(i);
+        out.print('|');
+        printCell(c.getSimpleName(), 8, out);
+        for(int j = 0; j < i; j++) {
+          printCell(diffStr, diffStr.length(), out);
+        }
       }
+      out.printf("\n");
 
-      // Test on a variety of sizes
-      for (int size = 1; size < MAX_LEN; size *= 2) {
+      for(int numThreads = 1; numThreads <= 16; numThreads <<= 1) {
         out.printf("|");
-        printCell(String.valueOf(size), title.length()+1, out);
+        printCell(String.valueOf(size), numBytesStr.length(), out);
+        printCell(String.valueOf(numThreads), numThreadsStr.length(), out);
 
-        Long expected = null;
-        for(Checksum c : crcs) {
+        BenchResult expected = null;
+        final List<BenchResult> previous = new ArrayList<BenchResult>();
+        for(Class<? extends Checksum> c : crcs) {
           System.gc();
-          final long result = doBench(c, bytes, size, out);
-          if(c.getClass() == zip.getClass()) {
+
+          final BenchResult result = doBench(c, numThreads, bytes, size);
+          printCell(String.format("%9.1f", result.mbps),
+              c.getSimpleName().length()+1, out);
+
+          //check result
+          if(c == zip) {
             expected = result;
-          } else if (result != expected) {
-            throw new RuntimeException(c.getClass() + " has bugs!");
+          } else if (expected == null) {
+            throw new RuntimeException("The first class is "
+                + c.getName() + " but not " + zip.getName());
+          } else if (result.value != expected.value) {
+            throw new RuntimeException(c + " has bugs!");
+          }
+
+          //compare result with previous
+          for(BenchResult p : previous) {
+            final double diff = (result.mbps - p.mbps) / p.mbps * 100;
+            printCell(String.format("%5.1f%%", diff), diffStr.length(), out);
           }
-            
+          previous.add(result);
         }
         out.printf("\n");
       }
     }
 
-    private static long doBench(Checksum crc, byte[] bytes, int size,
-        PrintStream out) {
-      final String name = crc.getClass().getSimpleName();
-      final int trials = BYTES_PER_SIZE / size;
-
-      final long st = System.nanoTime();
-      crc.reset();
-      for (int i = 0; i < trials; i++) {
-        crc.update(bytes, 0, size);
-      }
-      final long result = crc.getValue();
-      final long et = System.nanoTime();
-
-      double mbProcessed = trials * size / 1024.0 / 1024.0;
-      double secsElapsed = (et - st) / 1000000000.0d;
-      if (out != null) {
-        final String s = String.format("%9.3f",  mbProcessed/secsElapsed);
-        printCell(s, name.length()+1, out);
+    private static BenchResult doBench(Class<? extends Checksum> clazz,
+        final int numThreads, final byte[] bytes, final int size)
+            throws Exception {
+
+      final Thread[] threads = new Thread[numThreads];
+      final BenchResult[] results = new BenchResult[threads.length];
+
+      {
+        final int trials = BYTES_PER_SIZE / size;
+        final double mbProcessed = trials * size / 1024.0 / 1024.0;
+        final Constructor<? extends Checksum> ctor = clazz.getConstructor();
+
+        for(int i = 0; i < threads.length; i++) {
+          final int index = i;
+          threads[i] = new Thread() {
+            final Checksum crc = ctor.newInstance();
+  
+            @Override
+            public void run() {
+              final long st = System.nanoTime();
+              crc.reset();
+              for (int i = 0; i < trials; i++) {
+                crc.update(bytes, 0, size);
+              }
+              final long et = System.nanoTime();
+              double secsElapsed = (et - st) / 1000000000.0d;
+              results[index] = new BenchResult(crc.getValue(), mbProcessed/secsElapsed);
+            }
+          };
+        }
+      }
+
+      for(int i = 0; i < threads.length; i++) {
+        threads[i].start();
+      }
+      for(int i = 0; i < threads.length; i++) {
+        threads[i].join();
+      }
+
+      final long expected = results[0].value;
+      double sum = results[0].mbps;
+      for(int i = 1; i < results.length; i++) {
+        if (results[i].value != expected) {
+          throw new AssertionError(clazz.getSimpleName() + " results do not match.");
+        }
+        sum += results[i].mbps;
+      }
+      return new BenchResult(expected, sum/results.length);
+    }
+
+    private static class BenchResult {
+      /** CRC value */
+      final long value;
+      /** Speed (MB per second) */
+      final double mbps;
+      
+      BenchResult(long value, double mbps) {
+        this.value = value;
+        this.mbps = mbps;
       }
-      return result;
     }
     
     private static void printSystemProperties(PrintStream out) {
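
Note: the benchmark now runs each implementation on 1, 2, 4, 8, and 16
threads, constructing a separate Checksum per thread through the reflected
no-arg constructor. java.util.zip.Checksum implementations are stateful and
not thread-safe, so sharing one instance across threads would corrupt both
the CRC value and the timing. The per-thread construction in isolation:

    Constructor<? extends Checksum> ctor = PureJavaCrc32.class.getConstructor();
    Checksum crc = ctor.newInstance();  // one instance per thread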

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java Sat Jul 12 02:24:40 2014
@@ -90,8 +90,8 @@ public class KMS {
 
   private static KeyProvider.KeyVersion removeKeyMaterial(
       KeyProvider.KeyVersion keyVersion) {
-    return new KMSClientProvider.KMSKeyVersion(keyVersion.getVersionName(),
-        null);
+    return new KMSClientProvider.KMSKeyVersion(keyVersion.getName(),
+        keyVersion.getVersionName(), null);
   }
 
   private static URI getKeyURI(String name) throws URISyntaxException {
@@ -103,6 +103,7 @@ public class KMS {
   @Path(KMSRESTConstants.KEYS_RESOURCE)
   @Consumes(MediaType.APPLICATION_JSON)
   @Produces(MediaType.APPLICATION_JSON)
+  @SuppressWarnings("unchecked")
   public Response createKey(@Context SecurityContext securityContext,
       Map jsonKey) throws Exception {
     KMSWebApp.getAdminCallsMeter().mark();
@@ -116,7 +117,8 @@ public class KMS {
                  ? (Integer) jsonKey.get(KMSRESTConstants.LENGTH_FIELD) : 0;
     String description = (String)
         jsonKey.get(KMSRESTConstants.DESCRIPTION_FIELD);
-
+    Map<String, String> attributes = (Map<String, String>)
+        jsonKey.get(KMSRESTConstants.ATTRIBUTES_FIELD);
     if (material != null) {
       assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user,
           CREATE_KEY + " with user provided material", name);
@@ -130,6 +132,7 @@ public class KMS {
       options.setBitLength(length);
     }
     options.setDescription(description);
+    options.setAttributes(attributes);
 
     KeyProvider.KeyVersion keyVersion = (material != null)
         ? provider.createKey(name, Base64.decodeBase64(material), options)
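
Note: createKey now reads an optional "attributes" map from the JSON payload
and forwards it to the provider via Options.setAttributes(), and
KMSKeyVersion carries the key name alongside the version name. Building the
options, sketched (the Options(Configuration) constructor and setCipher()
are assumed from the wider KeyProvider API):

    KeyProvider.Options options = new KeyProvider.Options(new Configuration());
    options.setCipher("AES/CTR/NoPadding");  // example cipher
    options.setBitLength(128);
    options.setDescription("example key");
    options.setAttributes(attributes);       // Map<String, String> from JSON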

Modified: hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java?rev=1609878&r1=1609877&r2=1609878&view=diff
==============================================================================
--- hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java (original)
+++ hadoop/common/branches/YARN-1051/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSServerJSONUtils.java Sat Jul 12 02:24:40 2014
@@ -35,6 +35,8 @@ public class KMSServerJSONUtils {
   public static Map toJSON(KeyProvider.KeyVersion keyVersion) {
     Map json = new LinkedHashMap();
     if (keyVersion != null) {
+      json.put(KMSRESTConstants.NAME_FIELD,
+          keyVersion.getName());
       json.put(KMSRESTConstants.VERSION_NAME_FIELD,
           keyVersion.getVersionName());
       json.put(KMSRESTConstants.MATERIAL_FIELD, keyVersion.getMaterial());
@@ -61,6 +63,7 @@ public class KMSServerJSONUtils {
       json.put(KMSRESTConstants.CIPHER_FIELD, meta.getCipher());
       json.put(KMSRESTConstants.LENGTH_FIELD, meta.getBitLength());
       json.put(KMSRESTConstants.DESCRIPTION_FIELD, meta.getDescription());
+      json.put(KMSRESTConstants.ATTRIBUTES_FIELD, meta.getAttributes());
       json.put(KMSRESTConstants.CREATED_FIELD,
           meta.getCreated().getTime());
       json.put(KMSRESTConstants.VERSIONS_FIELD,