Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2017/09/12 00:35:38 UTC

[36/84] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
new file mode 100644
index 0000000..c0712c3
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
@@ -0,0 +1,680 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SaslPropertiesResolver;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Shell;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.crypto.Cipher;
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.lang.reflect.InvocationTargetException;
+import java.net.InetAddress;
+import java.security.NoSuchAlgorithmException;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.regex.Pattern;
+
+import static org.apache.hadoop.security.UserGroupInformation.*;
+import static org.apache.hadoop.security.authentication.util.KerberosUtil.*;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*;
+
+/**
+ * Kerberos diagnostics.
+ * At some point this may move to hadoop core, so please keep use of slider
+ * methods and classes to ~0.
+ *
+ * This operation expands some of the diagnostic output of the security code,
+ * but not all. For completeness:
+ *
+ * Set the environment variable {@code HADOOP_JAAS_DEBUG=true}.
+ * Set the log level for {@code org.apache.hadoop.security=DEBUG}.
+ */
+public class KerberosDiags implements Closeable {
+
+  private static final Logger LOG = LoggerFactory.getLogger(KerberosDiags.class);
+  public static final String KRB5_CCNAME = "KRB5CCNAME";
+  public static final String JAVA_SECURITY_KRB5_CONF
+    = "java.security.krb5.conf";
+  public static final String JAVA_SECURITY_KRB5_REALM
+    = "java.security.krb5.realm";
+  public static final String SUN_SECURITY_KRB5_DEBUG
+    = "sun.security.krb5.debug";
+  public static final String SUN_SECURITY_SPNEGO_DEBUG
+    = "sun.security.spnego.debug";
+  public static final String SUN_SECURITY_JAAS_FILE
+    = "java.security.auth.login.config";
+  public static final String KERBEROS_KINIT_COMMAND
+    = "hadoop.kerberos.kinit.command";
+  public static final String HADOOP_AUTHENTICATION_IS_DISABLED
+      = "Hadoop authentication is disabled";
+  public static final String UNSET = "(unset)";
+  public static final String NO_DEFAULT_REALM = "Cannot locate default realm";
+
+  private final Configuration conf;
+  private final List<String> services;
+  private final PrintStream out;
+  private final File keytab;
+  private final String principal;
+  private final long minKeyLength;
+  private final boolean securityRequired;
+
+  public static final String CAT_JVM = "JVM";
+  public static final String CAT_JAAS = "JAAS";
+  public static final String CAT_CONFIG = "CONFIG";
+  public static final String CAT_LOGIN = "LOGIN";
+  public static final String CAT_KERBEROS = "KERBEROS";
+  public static final String CAT_SASL = "SASL";
+
+  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
+  public KerberosDiags(Configuration conf,
+      PrintStream out,
+      List<String> services,
+      File keytab,
+      String principal,
+      long minKeyLength,
+      boolean securityRequired) {
+    this.conf = conf;
+    this.services = services;
+    this.keytab = keytab;
+    this.principal = principal;
+    this.out = out;
+    this.minKeyLength = minKeyLength;
+    this.securityRequired = securityRequired;
+  }
+
+  @Override
+  public void close() throws IOException {
+    flush();
+  }
+
+  /**
+   * Execute diagnostics.
+   * <p>
+   * Things it would be nice if UGI made accessible
+   * <ol>
+   *   <li>A way to enable JAAS debug programmatically</li>
+   *   <li>Access to the TGT</li>
+   * </ol>
+   * @return true if security was enabled and all probes were successful
+   * @throws KerberosDiagsFailure explicitly raised failure
+   * @throws Exception other security problems
+   */
+  @SuppressWarnings("deprecation")
+  public boolean execute() throws Exception {
+
+    title("Kerberos Diagnostics scan at %s",
+        new Date(System.currentTimeMillis()));
+
+    // check that the machine has a name
+    println("Hostname: %s",
+        InetAddress.getLocalHost().getCanonicalHostName());
+
+    // Fail fast on a JVM without JCE installed.
+    validateKeyLength();
+
+    // look at realm
+    println("JVM Kerberos Login Module = %s", getKrb5LoginModuleName());
+    printDefaultRealm();
+
+    title("System Properties");
+    for (String prop : new String[]{
+      JAVA_SECURITY_KRB5_CONF,
+      JAVA_SECURITY_KRB5_REALM,
+      SUN_SECURITY_KRB5_DEBUG,
+      SUN_SECURITY_SPNEGO_DEBUG,
+      SUN_SECURITY_JAAS_FILE
+    }) {
+      printSysprop(prop);
+    }
+
+    title("Environment Variables");
+    for (String env : new String[]{
+      "HADOOP_JAAS_DEBUG",
+      KRB5_CCNAME,
+      "HADOOP_USER_NAME",
+      "HADOOP_PROXY_USER",
+      HADOOP_TOKEN_FILE_LOCATION,
+    }) {
+      printEnv(env);
+    }
+
+    for (String prop : new String[]{
+      KERBEROS_KINIT_COMMAND,
+      HADOOP_SECURITY_AUTHENTICATION,
+      HADOOP_SECURITY_AUTHORIZATION,
+      "hadoop.kerberos.min.seconds.before.relogin",    // not in 2.6
+      "hadoop.security.dns.interface",   // not in 2.6
+      "hadoop.security.dns.nameserver",  // not in 2.6
+      HADOOP_RPC_PROTECTION,
+      HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
+      HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX,
+      HADOOP_SECURITY_GROUP_MAPPING,
+      "hadoop.security.impersonation.provider.class",    // not in 2.6
+      "dfs.data.transfer.protection" // HDFS
+    }) {
+      printConfOpt(prop);
+    }
+
+    // check that authentication is enabled
+    if (SecurityUtil.getAuthenticationMethod(conf)
+        .equals(AuthenticationMethod.SIMPLE)) {
+      println(HADOOP_AUTHENTICATION_IS_DISABLED);
+      failif(securityRequired, CAT_CONFIG, HADOOP_AUTHENTICATION_IS_DISABLED);
+      // no security, skip rest of test
+      return false;
+    }
+
+    validateKrb5File();
+    validateSasl(HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS);
+    validateSasl("dfs.data.transfer.saslproperties.resolver.class");
+    validateKinitExecutable();
+    validateJAAS();
+    // now the big test: login, then try again
+    boolean krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG);
+    boolean spnegoDebug = getAndSet(SUN_SECURITY_SPNEGO_DEBUG);
+    try {
+      title("Logging in");
+
+      if (keytab != null) {
+        dumpKeytab(keytab);
+        loginFromKeytab();
+      } else {
+        UserGroupInformation loginUser = getLoginUser();
+        dumpUGI("Log in user", loginUser);
+        validateUGI("Login user", loginUser);
+        println("Ticket based login: %b", isLoginTicketBased());
+        println("Keytab based login: %b", isLoginKeytabBased());
+      }
+
+      return true;
+    } finally {
+      // restore original system properties
+      System.setProperty(SUN_SECURITY_KRB5_DEBUG,
+        Boolean.toString(krb5Debug));
+      System.setProperty(SUN_SECURITY_SPNEGO_DEBUG,
+        Boolean.toString(spnegoDebug));
+    }
+  }
+
+  /**
+   * Fail fast on a JVM without JCE installed.
+   *
+   * This is a recurrent problem
+   * (that is: it keeps creeping back with JVM updates);
+   * a fast failure is the best tactic
+   * @throws NoSuchAlgorithmException
+   */
+
+  protected void validateKeyLength() throws NoSuchAlgorithmException {
+    int aesLen = Cipher.getMaxAllowedKeyLength("AES");
+    println("Maximum AES encryption key length %d bits", aesLen);
+    failif(aesLen < minKeyLength,
+        CAT_JVM,
+        "Java Cryptography Extensions are not installed on this JVM."
+        + " Maximum supported key length %d - minimum required %d",
+        aesLen, minKeyLength);
+  }
+
+  /**
+   * Get the default realm.
+   * <p>
+   * Not having a default realm may be harmless, so it is noted at info.
+   * All other invocation failures are downgraded to warn, as
+   * follow-on actions may still work.
+   * Failure to invoke the method via reflection is rejected,
+   * as it's a sign of JVM compatibility issues that may have other
+   * consequences.
+   */
+  protected void printDefaultRealm() {
+    try {
+      println("Default Realm = %s",
+          getDefaultRealm());
+    } catch (ClassNotFoundException
+        | IllegalAccessException
+        | NoSuchMethodException e) {
+
+      throw new KerberosDiagsFailure(CAT_JVM, e,
+          "Failed to invoke krb5.Config.getDefaultRealm: %s", e);
+    } catch (InvocationTargetException e) {
+      Throwable cause = e.getCause() != null ? e.getCause() : e;
+      if (cause.toString().contains(NO_DEFAULT_REALM)) {
+        // exception raised if there is no default realm. This is not
+        // always a problem, so downgrade to a message.
+        println("Host has no default realm");
+        LOG.debug(cause.toString(), cause);
+      } else {
+        println("Kerberos.getDefaultRealm() failed: %s\n%s",
+            cause,
+            org.apache.hadoop.util.StringUtils.stringifyException(cause));
+      }
+    }
+  }
+
+  /**
+   * Locate the krb5.conf file and dump it.
+   * No-op on Windows.
+   * @throws IOException
+   */
+  private void validateKrb5File() throws IOException {
+    if (!Shell.WINDOWS) {
+      title("Locating Kerberos configuration file");
+      String krbPath = "/etc/krb5.conf";
+      String jvmKrbPath = System.getProperty(JAVA_SECURITY_KRB5_CONF);
+      if (jvmKrbPath != null) {
+        println("Setting kerberos path from sysprop %s: %s",
+          JAVA_SECURITY_KRB5_CONF, jvmKrbPath);
+        krbPath = jvmKrbPath;
+      }
+
+      String krb5name = System.getenv(KRB5_CCNAME);
+      if (krb5name != null) {
+        println("Setting kerberos path from environment variable %s: %s",
+          KRB5_CCNAME, krb5name);
+        krbPath = krb5name;
+        if (jvmKrbPath != null) {
+          println("Warning - both %s and %s were set - %s takes priority",
+            JAVA_SECURITY_KRB5_CONF, KRB5_CCNAME, KRB5_CCNAME);
+        }
+      }
+
+      File krbFile = new File(krbPath);
+      println("Kerberos configuration file = %s", krbFile);
+      failif(!krbFile.exists(),
+          CAT_KERBEROS,
+          "Kerberos configuration file %s not found", krbFile);
+      dump(krbFile);
+    }
+  }
+
+  /**
+   * Dump a keytab: list all principals.
+   * @param keytabFile the keytab file
+   * @throws IOException IO problems
+   */
+  public void dumpKeytab(File keytabFile) throws IOException {
+    title("Examining keytab %s", keytabFile);
+    File kt = keytabFile.getCanonicalFile();
+    failif(!kt.exists(), CAT_CONFIG, "Keytab not found: %s", kt);
+    failif(!kt.isFile(), CAT_CONFIG, "Keytab is not a valid file: %s", kt);
+
+    String[] names = getPrincipalNames(keytabFile.getCanonicalPath(),
+        Pattern.compile(".*"));
+    println("keytab entry count: %d", names.length);
+    for (String name : names) {
+      println("    %s", name);
+    }
+    println("-----");
+  }
+
+  /**
+   * Log in from a keytab, dump the UGI, validate it, then try and log in again.
+   * That second-time login catches JVM/Hadoop compatibility problems.
+   * @throws IOException
+   */
+  private void loginFromKeytab() throws IOException {
+    UserGroupInformation ugi;
+    String identity;
+    if (keytab != null) {
+      File kt = keytab.getCanonicalFile();
+      println("Using keytab %s principal %s", kt, principal);
+      identity = principal;
+
+      failif(StringUtils.isEmpty(principal), CAT_KERBEROS,
+          "No principal defined");
+      ugi = loginUserFromKeytabAndReturnUGI(principal, kt.getPath());
+      dumpUGI(identity, ugi);
+      validateUGI(principal, ugi);
+
+      title("Attempting to log in from keytab again");
+      // package scoped - hence the reason why this class must be in the
+      // hadoop.security package
+      setShouldRenewImmediatelyForTests(true);
+      // attempt a new login
+      ugi.reloginFromKeytab();
+    } else {
+      println("No keytab: logging is as current user");
+    }
+  }
+
+  /**
+   * Dump a UGI.
+   * @param title title of this section
+   * @param ugi UGI to dump
+   * @throws IOException
+   */
+  private void dumpUGI(String title, UserGroupInformation ugi)
+    throws IOException {
+    title(title);
+    println("UGI instance = %s", ugi);
+    println("Has kerberos credentials: %b", ugi.hasKerberosCredentials());
+    println("Authentication method: %s", ugi.getAuthenticationMethod());
+    println("Real Authentication method: %s",
+      ugi.getRealAuthenticationMethod());
+    title("Group names");
+    for (String name : ugi.getGroupNames()) {
+      println(name);
+    }
+    title("Credentials");
+    Credentials credentials = ugi.getCredentials();
+    List<Text> secretKeys = credentials.getAllSecretKeys();
+    title("Secret keys");
+    if (!secretKeys.isEmpty()) {
+      for (Text secret: secretKeys) {
+        println("%s", secret);
+      }
+    } else {
+      println("(none)");
+    }
+
+    dumpTokens(ugi);
+  }
+
+  /**
+   * Validate the UGI: verify it is kerberized.
+   * @param messagePrefix message in exceptions
+   * @param user user to validate
+   */
+  private void validateUGI(String messagePrefix, UserGroupInformation user) {
+    failif(!user.hasKerberosCredentials(),
+        CAT_LOGIN, "%s: No kerberos credentials for %s", messagePrefix, user);
+    failif(user.getAuthenticationMethod() == null,
+        CAT_LOGIN, "%s: Null AuthenticationMethod for %s", messagePrefix, user);
+  }
+
+  /**
+   * A cursory look at the {@code kinit} executable.
+   * If it is an absolute path, it must exist and have a size > 0.
+   * If it is just a command, it has to be on the PATH. There's no check
+   * for that, but the PATH is printed out.
+   */
+  private void validateKinitExecutable() {
+    String kinit = conf.getTrimmed(KERBEROS_KINIT_COMMAND, "");
+    if (!kinit.isEmpty()) {
+      File kinitPath = new File(kinit);
+      println("%s = %s", KERBEROS_KINIT_COMMAND, kinitPath);
+      if (kinitPath.isAbsolute()) {
+        failif(!kinitPath.exists(), CAT_KERBEROS,
+            "%s executable does not exist: %s",
+            KERBEROS_KINIT_COMMAND, kinitPath);
+        failif(!kinitPath.isFile(), CAT_KERBEROS,
+            "%s path does not refer to a file: %s",
+            KERBEROS_KINIT_COMMAND, kinitPath);
+        failif(kinitPath.length() == 0, CAT_KERBEROS,
+            "%s file is empty: %s",
+            KERBEROS_KINIT_COMMAND, kinitPath);
+      } else {
+        println("Executable %s is relative -must be on the PATH", kinit);
+        printEnv("PATH");
+      }
+    }
+  }
+
+  /**
+   * Try to load the SASL resolver.
+   * @param saslPropsResolverKey key for the SASL resolver
+   */
+  private void validateSasl(String saslPropsResolverKey) {
+    title("Resolving SASL property %s", saslPropsResolverKey);
+    String saslPropsResolver = conf.getTrimmed(saslPropsResolverKey);
+    try {
+      Class<? extends SaslPropertiesResolver> resolverClass = conf.getClass(
+          saslPropsResolverKey,
+          SaslPropertiesResolver.class, SaslPropertiesResolver.class);
+      println("Resolver is %s", resolverClass);
+    } catch (RuntimeException e) {
+      throw new KerberosDiagsFailure(CAT_SASL, e,
+          "Failed to load %s class %s",
+          saslPropsResolverKey, saslPropsResolver);
+    }
+  }
+
+  /**
+   * Validate any JAAS entry referenced in the {@link #SUN_SECURITY_JAAS_FILE}
+   * property.
+   */
+  private void validateJAAS() {
+    String jaasFilename = System.getProperty(SUN_SECURITY_JAAS_FILE);
+    if (jaasFilename != null) {
+      title("JAAS");
+      File jaasFile = new File(jaasFilename);
+      println("JAAS file is defined in %s: %s",
+          SUN_SECURITY_JAAS_FILE, jaasFile);
+      failif(!jaasFile.exists(), CAT_JAAS,
+          "JAAS file does not exist: %s", jaasFile);
+      failif(!jaasFile.isFile(), CAT_JAAS,
+          "Specified JAAS file is not a file: %s", jaasFile);
+    }
+  }
+
+  /**
+   * Dump all tokens of a user
+   * @param user user
+   */
+  public void dumpTokens(UserGroupInformation user) {
+    Collection<Token<? extends TokenIdentifier>> tokens
+      = user.getCredentials().getAllTokens();
+    title("Token Count: %d", tokens.size());
+    for (Token<? extends TokenIdentifier> token : tokens) {
+      println("Token %s", token.getKind());
+    }
+  }
+
+  /**
+   * Set a system property to true; return the old value so it can be restored.
+   * @param sysprop property
+   * @return the previous value
+   */
+  private boolean getAndSet(String sysprop) {
+    boolean old = Boolean.getBoolean(sysprop);
+    System.setProperty(sysprop, "true");
+    return old;
+  }
+
+  /**
+   * Flush all active output channels, including {@code System.err},
+   * so as to stay in sync with any JRE log messages.
+   */
+  private void flush() {
+    if (out != null) {
+      out.flush();
+    } else {
+      System.out.flush();
+    }
+    System.err.flush();
+  }
+
+  /**
+   * Format and print a line of output.
+   * This goes to any output file, or
+   * is logged at info. The output is flushed before and after, to
+   * try and stay in sync with JRE logging.
+   * @param format format string
+   * @param args any arguments
+   */
+  @VisibleForTesting
+  public void println(String format, Object... args) {
+    println(format(format, args));
+  }
+
+  /**
+   * Print a line of output. This goes to any output file, or
+   * is logged at info. The output is flushed before and after, to
+   * try and stay in sync with JRE logging.
+   * @param msg message string
+   */
+  @VisibleForTesting
+  private void println(String msg) {
+    flush();
+    if (out != null) {
+      out.println(msg);
+    } else {
+      LOG.info(msg);
+    }
+    flush();
+  }
+
+  /**
+   * Print a title entry
+   * @param format format string
+   * @param args any arguments
+   */
+  private void title(String format, Object... args) {
+    println("");
+    println("");
+    String msg = "== " + format(format, args) + " ==";
+    println(msg);
+    println("");
+  }
+
+  /**
+   * Print a system property, or {@link #UNSET} if unset.
+   * @param property property to print
+   */
+  private void printSysprop(String property) {
+    println("%s = \"%s\"", property,
+        System.getProperty(property, UNSET));
+  }
+
+  /**
+   * Print a configuration option, or {@link #UNSET} if unset.
+   * @param option option to print
+   */
+  private void printConfOpt(String option) {
+    println("%s = \"%s\"", option, conf.get(option, UNSET));
+  }
+
+  /**
+   * Print an environment variable's name and value; printing
+   * {@link #UNSET} if it is not set
+   * @param variable environment variable
+   */
+  private void printEnv(String variable) {
+    String env = System.getenv(variable);
+    println("%s = \"%s\"", variable, env != null ? env : UNSET);
+  }
+
+  /**
+   * Dump any file to standard out; add a trailing newline
+   * @param file file to dump
+   * @throws IOException IO problems
+   */
+  public void dump(File file) throws IOException {
+    try (FileInputStream in = new FileInputStream(file)) {
+      for (String line : IOUtils.readLines(in)) {
+        println("%s", line);
+      }
+    }
+    println("");
+  }
+
+  /**
+   * Format and raise a failure
+   *
+   * @param category category for exception
+   * @param message string formatting message
+   * @param args any arguments for the formatting
+   * @throws KerberosDiagsFailure containing the formatted text
+   */
+  private void fail(String category, String message, Object... args)
+    throws KerberosDiagsFailure {
+    throw new KerberosDiagsFailure(category, message, args);
+  }
+
+  /**
+   * Conditional failure with string formatted arguments
+   * @param condition failure condition
+   * @param category category for exception
+   * @param message string formatting message
+   * @param args any arguments for the formatting
+   * @throws KerberosDiagsFailure containing the formatted text
+   *         if the condition was met
+   */
+  private void failif(boolean condition,
+      String category,
+      String message,
+      Object... args)
+    throws KerberosDiagsFailure {
+    if (condition) {
+      fail(category, message, args);
+    }
+  }
+
+  /**
+   * Format a string, treating a call where there are no varargs values
+   * as a string to pass through unformatted.
+   * @param message message, which is either a format string + args, or
+   * a general string
+   * @param args argument array
+   * @return a string for printing.
+   */
+  public static String format(String message, Object... args) {
+    if (args.length == 0) {
+      return message;
+    } else {
+      return String.format(message, args);
+    }
+  }
+
+  /**
+   * Diagnostics failures return the exit code 41, "unauthorized".
+   *
+   * They have a category, initially for testing: the category can be
+   * validated without having to match on the entire string.
+   */
+  public static class KerberosDiagsFailure extends ExitUtil.ExitException {
+    private final String category;
+
+    public KerberosDiagsFailure(String category, String message) {
+      super(41, category + ": " + message);
+      this.category = category;
+    }
+
+    public KerberosDiagsFailure(String category, String message, Object... args) {
+      this(category, format(message, args));
+    }
+
+    public KerberosDiagsFailure(String category, Throwable throwable,
+        String message, Object... args) {
+      this(category, message, args);
+      initCause(throwable);
+    }
+
+    public String getCategory() {
+      return category;
+    }
+  }
+}
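
For context, a minimal usage sketch of the class above. The keytab path and
principal are hypothetical placeholders; only the constructor and execute()
shown in this diff are assumed.

    import java.io.File;
    import java.util.Collections;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.service.utils.KerberosDiags;

    public class KerberosDiagsDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Keytab path and principal below are placeholders.
        try (KerberosDiags diags = new KerberosDiags(
            conf,
            System.out,                       // report to stdout, not the log
            Collections.<String>emptyList(),  // no extra services to probe
            new File("/etc/security/keytabs/service.keytab"),
            "service/host@EXAMPLE.COM",
            256,     // minimum AES key length: fails fast if JCE is missing
            true)) { // treat "security disabled" as a failure
          boolean allProbesPassed = diags.execute();
          System.out.println("Kerberos diagnostics passed: " + allProbesPassed);
        }
      }
    }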

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PatternValidator.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PatternValidator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PatternValidator.java
new file mode 100644
index 0000000..6efa880
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PatternValidator.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import java.util.regex.Pattern;
+
+/**
+ * Utility class to validate strings against a predefined pattern.
+ */
+public class PatternValidator {
+
+  public static final String E_INVALID_NAME =
+      "Invalid name %s does not match the pattern pattern %s ";
+  private final Pattern valid;
+  private final String pattern;
+
+  public PatternValidator(String pattern) {
+    this.pattern = pattern;
+    valid = Pattern.compile(pattern);
+  }
+
+  /**
+   * Validate the name, restricting it to the set defined by the
+   * pattern supplied to the constructor.
+   * @param name name to validate
+   * @throws IllegalArgumentException if not a valid name
+   */
+  public void validate(String name) {
+    if (!matches(name)) {
+      throw new IllegalArgumentException(
+          String.format(E_INVALID_NAME, name, pattern));
+    }
+  }
+
+  /**
+   * Query to see if the pattern matches
+   * @param name name to validate
+   * @return true if the string matches the pattern
+   */
+  public boolean matches(String name) {
+    return valid.matcher(name).matches();
+  }
+}
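
A quick sketch of the validator's two entry points; the pattern here is an
arbitrary example, not one mandated by this patch.

    import org.apache.hadoop.yarn.service.utils.PatternValidator;

    public class PatternValidatorDemo {
      public static void main(String[] args) {
        // Example pattern: lowercase DNS-style labels.
        PatternValidator names = new PatternValidator("[a-z][a-z0-9\\-]*");
        System.out.println(names.matches("web-server")); // true
        System.out.println(names.matches("WebServer"));  // false: uppercase
        names.validate("web-server");                    // returns normally
        names.validate("WebServer");                     // throws IllegalArgumentException
      }
    }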

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PortScanner.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PortScanner.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PortScanner.java
new file mode 100644
index 0000000..2dbf37f
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PortScanner.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.service.utils;
+
+import org.apache.hadoop.yarn.service.conf.SliderExitCodes;
+import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * A scanner which can take an input string for a port range, or scan the lot.
+ */
+public class PortScanner {
+  private static final Pattern NUMBER_RANGE =
+      Pattern.compile("^(\\d+)\\s*-\\s*(\\d+)$");
+  private static final Pattern SINGLE_NUMBER = Pattern.compile("^\\d+$");
+
+  private List<Integer> remainingPortsToCheck;
+
+  public PortScanner() {
+  }
+
+  public void setPortRange(String input) throws BadConfigException {
+    // first split based on commas
+    Set<Integer> inputPorts = new TreeSet<>();
+    String[] ranges = input.split(",");
+    for (String range : ranges) {
+      if (range.trim().isEmpty()) {
+        continue;
+      }
+      Matcher m = SINGLE_NUMBER.matcher(range.trim());
+      if (m.find()) {
+        inputPorts.add(Integer.parseInt(m.group()));
+        continue;
+      }
+      m = NUMBER_RANGE.matcher(range.trim());
+      if (m.find()) {
+        String[] boundaryValues = m.group(0).split("-");
+        int start = Integer.parseInt(boundaryValues[0].trim());
+        int end = Integer.parseInt(boundaryValues[1].trim());
+        if (end < start) {
+          throw new BadConfigException("End of port range is before start: "
+              + range + " in input: " + input);
+        }
+        for (int i = start; i < end + 1; i++) {
+          inputPorts.add(i);
+        }
+        continue;
+      }
+      throw new BadConfigException("Bad port range: " + range + " in input: "
+          + input);
+    }
+    if (inputPorts.isEmpty()) {
+      throw new BadConfigException("No ports found in range: " + input);
+    }
+    this.remainingPortsToCheck = new ArrayList<>(inputPorts);
+  }
+
+  public List<Integer> getRemainingPortsToCheck() {
+    return remainingPortsToCheck;
+  }
+
+  public int getAvailablePort() throws SliderException, IOException {
+    if (remainingPortsToCheck != null) {
+      return getAvailablePortViaPortArray();
+    } else {
+      return SliderUtils.getOpenPort();
+    }
+  }
+
+  private int getAvailablePortViaPortArray() throws SliderException {
+    boolean found = false;
+    int availablePort = -1;
+    Iterator<Integer> portsToCheck = this.remainingPortsToCheck.iterator();
+    while (portsToCheck.hasNext() && !found) {
+      int portToCheck = portsToCheck.next();
+      found = SliderUtils.isPortAvailable(portToCheck);
+      if (found) {
+        availablePort = portToCheck;
+        portsToCheck.remove();
+      }
+    }
+
+    if (availablePort < 0) {
+      throw new SliderException(SliderExitCodes.EXIT_BAD_CONFIGURATION,
+        "No available ports found in configured range %s",
+        remainingPortsToCheck);
+    }
+
+    return availablePort;
+  }
+}
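
A short sketch of the range grammar accepted by setPortRange and of the
allocation loop; it assumes only the class as shown (SliderUtils does the
actual availability probe).

    import java.util.List;
    import org.apache.hadoop.yarn.service.utils.PortScanner;

    public class PortScannerDemo {
      public static void main(String[] args) throws Exception {
        PortScanner scanner = new PortScanner();
        // Comma-separated entries; each is a single port or an inclusive range.
        scanner.setPortRange("8040, 8045-8048, 8050");
        List<Integer> candidates = scanner.getRemainingPortsToCheck();
        System.out.println(candidates); // [8040, 8045, 8046, 8047, 8048, 8050]
        int port = scanner.getAvailablePort(); // first free port; removed from the list
        System.out.println("Allocated port " + port);
      }
    }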

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
new file mode 100644
index 0000000..9d00b3c
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfiguration.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
+import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.SerializationConfig;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * JSON-serializable description of a published key-value configuration.
+ * 
+ * The values themselves are not serialized in the external view; they have
+ * to be served up by the far end
+ */
+@JsonIgnoreProperties(ignoreUnknown = true)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+public class PublishedConfiguration {
+
+  public String description;
+  public long updated;
+  
+  public String updatedTime;
+
+  public Map<String, String> entries = new HashMap<>();
+
+  public PublishedConfiguration() {
+  }
+
+  /**
+   * Build an empty published configuration.
+   * @param description configuration description
+   */
+  public PublishedConfiguration(String description) {
+    this.description = description;
+  }
+
+  /**
+   * Build a configuration from the entries
+   * @param description configuration description
+   * @param entries entries to put
+   */
+  public PublishedConfiguration(String description,
+      Iterable<Map.Entry<String, String>> entries) {
+    this.description = description;
+    putValues(entries);
+  }
+
+  /**
+   * Build a published configuration, using the keys from keysource,
+   * but resolving the values from the value source, via Configuration.get()
+   * @param description configuration description
+   * @param keysource source of keys
+   * @param valuesource source of values
+   */
+  public PublishedConfiguration(String description,
+      Iterable<Map.Entry<String, String>> keysource,
+      Configuration valuesource) {
+    this.description = description;
+    putValues(ConfigHelper.resolveConfiguration(keysource, valuesource));
+  }
+
+  
+  /**
+   * Is the configuration empty? This means either that it has not
+   * been given any values, or it is a stripped-down copy sent over
+   * the wire.
+   * @return true if it is empty
+   */
+  public boolean isEmpty() {
+    return entries.isEmpty();
+  }
+
+
+  public void setUpdated(long updated) {
+    this.updated = updated;
+    this.updatedTime = new Date(updated).toString();
+  }
+
+  public long getUpdated() {
+    return updated;
+  }
+
+  /**
+   * Set the values from an iterable (this covers a Hadoop Configuration
+   * and a Java Properties object).
+   * Any existing values are discarded.
+   * @param entries entries to put
+   */
+  public void putValues(Iterable<Map.Entry<String, String>> entries) {
+    this.entries = new HashMap<>();
+    for (Map.Entry<String, String> entry : entries) {
+      this.entries.put(entry.getKey(), entry.getValue());
+    }
+  }
+
+  /**
+   * Convert to Hadoop XML
+   * @return the configuration as a Hadoop Configuration
+   */
+  public Configuration asConfiguration() {
+    Configuration conf = new Configuration(false);
+    try {
+      ConfigHelper.addConfigMap(conf, entries, "");
+    } catch (BadConfigException e) {
+      // triggered on a null value; switch to a runtime (and discard the stack)
+      throw new RuntimeException(e.toString());
+    }
+    return conf;
+  }
+  
+  public String asConfigurationXML() throws IOException {
+    return ConfigHelper.toXml(asConfiguration());
+  }
+
+  /**
+   * Convert values to properties
+   * @return a property file
+   */
+  public Properties asProperties() {
+    Properties props = new Properties();
+    props.putAll(entries);
+    return props;
+  }
+
+  /**
+   * Return the values as a JSON string
+   * @return the JSON representation
+   * @throws IOException marshalling failure
+   */
+  public String asJson() throws IOException {
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
+    return mapper.writeValueAsString(entries);
+  }
+
+
+  /**
+   * This makes a copy without the nested content, so it is suitable
+   * for returning as part of the list of a parent's values.
+   * @return the copy
+   */
+  public PublishedConfiguration shallowCopy() {
+    PublishedConfiguration that = new PublishedConfiguration();
+    that.description = this.description;
+    that.updated = this.updated;
+    that.updatedTime = this.updatedTime;
+    return that;
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb =
+        new StringBuilder("PublishedConfiguration{");
+    sb.append("description='").append(description).append('\'');
+    sb.append(" entries = ").append(entries.size());
+    sb.append('}');
+    return sb.toString();
+  }
+
+  /**
+   * Create an outputter for a given format
+   * @param format format to use
+   * @return an instance of output
+   */
+  public PublishedConfigurationOutputter createOutputter(ConfigFormat format) {
+    return PublishedConfigurationOutputter.createOutputter(format, this);
+  }
+}
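
To illustrate the round trips this class supports, a small sketch, assuming
only the methods above (ConfigHelper is referenced by asConfiguration() but
is not part of this diff):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.service.utils.PublishedConfiguration;

    public class PublishedConfigurationDemo {
      public static void main(String[] args) throws Exception {
        Map<String, String> entries = new HashMap<>();
        entries.put("a.b.c", "1");
        PublishedConfiguration published =
            new PublishedConfiguration("demo config", entries.entrySet());
        published.setUpdated(System.currentTimeMillis()); // also sets updatedTime

        Configuration conf = published.asConfiguration(); // Hadoop view
        System.out.println(conf.get("a.b.c"));            // "1"
        System.out.println(published.asJson());           // indented JSON
        // The shallow copy keeps description/timestamps but drops the entries.
        System.out.println(published.shallowCopy().isEmpty()); // true
      }
    }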

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
new file mode 100644
index 0000000..88ecf2c
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.DumperOptions.FlowStyle;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.StringWriter;
+import java.util.Properties;
+
+/**
+ * Output a published configuration
+ */
+public abstract class PublishedConfigurationOutputter {
+
+  private static final String COMMENTS = "Generated by Apache Slider";
+
+  protected final PublishedConfiguration owner;
+
+  protected PublishedConfigurationOutputter(PublishedConfiguration owner) {
+    this.owner = owner;
+  }
+
+  /**
+   * Save the config to a destination file, in the format of this outputter.
+   * @param dest destination file
+   * @throws IOException IO problems
+   */
+  public void save(File dest) throws IOException {
+    FileUtils.writeStringToFile(dest, asString(), Charsets.UTF_8);
+  }
+
+  /**
+   * Save the content. The default saves the asString() value
+   * to the output stream
+   * @param out output stream
+   * @throws IOException
+   */
+  public void save(OutputStream out) throws IOException {
+    IOUtils.write(asString(), out, Charsets.UTF_8);
+  }
+
+  /**
+   * Convert to a string
+   * @return the string form
+   * @throws IOException
+   */
+  public abstract String asString() throws IOException;
+
+  /**
+   * Create an outputter for the chosen format
+   * @param format format enumeration
+   * @param owner owning config
+   * @return the outputter
+   */
+
+  public static PublishedConfigurationOutputter createOutputter(ConfigFormat format,
+      PublishedConfiguration owner) {
+    Preconditions.checkNotNull(owner);
+    switch (format) {
+      case XML:
+      case HADOOP_XML:
+        return new XmlOutputter(owner);
+      case PROPERTIES:
+        return new PropertiesOutputter(owner);
+      case JSON:
+        return new JsonOutputter(owner);
+      case ENV:
+        return new EnvOutputter(owner);
+      case TEMPLATE:
+        return new TemplateOutputter(owner);
+      case YAML:
+        return new YamlOutputter(owner);
+      default:
+        throw new RuntimeException("Unsupported format :" + format);
+    }
+  }
+
+  public static class XmlOutputter extends PublishedConfigurationOutputter {
+
+
+    private final Configuration configuration;
+
+    public XmlOutputter(PublishedConfiguration owner) {
+      super(owner);
+      configuration = owner.asConfiguration();
+    }
+
+    @Override
+    public void save(OutputStream out) throws IOException {
+      configuration.writeXml(out);
+    }
+
+    @Override
+    public String asString() throws IOException {
+      return ConfigHelper.toXml(configuration);
+    }
+
+    public Configuration getConfiguration() {
+      return configuration;
+    }
+  }
+
+  public static class PropertiesOutputter extends PublishedConfigurationOutputter {
+
+    private final Properties properties;
+
+    public PropertiesOutputter(PublishedConfiguration owner) {
+      super(owner);
+      properties = owner.asProperties();
+    }
+
+    @Override
+    public void save(OutputStream out) throws IOException {
+      properties.store(out, COMMENTS);
+    }
+
+    @Override
+    public String asString() throws IOException {
+      StringWriter sw = new StringWriter();
+      properties.store(sw, COMMENTS);
+      return sw.toString();
+    }
+  }
+
+
+  public static class JsonOutputter extends PublishedConfigurationOutputter {
+
+    public JsonOutputter(PublishedConfiguration owner) {
+      super(owner);
+    }
+
+    @Override
+    public String asString() throws IOException {
+      return owner.asJson();
+    }
+  }
+
+
+  public static class EnvOutputter extends PublishedConfigurationOutputter {
+
+    public EnvOutputter(PublishedConfiguration owner) {
+      super(owner);
+    }
+
+    @Override
+    public String asString() throws IOException {
+      if (!owner.entries.containsKey("content")) {
+        throw new IOException("Configuration has no content field and cannot " +
+            "be retrieved as type 'env'");
+      }
+      String content = owner.entries.get("content");
+      return ConfigUtils.replaceProps(owner.entries, content);
+    }
+  }
+
+  public static class TemplateOutputter extends EnvOutputter {
+    public TemplateOutputter(PublishedConfiguration owner) {
+      super(owner);
+    }
+  }
+
+  public static class YamlOutputter extends PublishedConfigurationOutputter {
+
+    private final Yaml yaml;
+
+    public YamlOutputter(PublishedConfiguration owner) {
+      super(owner);
+      DumperOptions options = new DumperOptions();
+      options.setDefaultFlowStyle(FlowStyle.BLOCK);
+      yaml = new Yaml(options);
+    }
+
+    @Override
+    public String asString() throws IOException {
+      return yaml.dump(owner.entries);
+    }
+  }
+
+}
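
A sketch of the factory in use with two of the formats named in the switch
above; paths and entries are placeholders.

    import java.io.File;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
    import org.apache.hadoop.yarn.service.utils.PublishedConfiguration;
    import org.apache.hadoop.yarn.service.utils.PublishedConfigurationOutputter;

    public class OutputterDemo {
      public static void main(String[] args) throws Exception {
        Map<String, String> entries = new HashMap<>();
        entries.put("fs.defaultFS", "hdfs://nn:8020"); // sample entry
        PublishedConfiguration published =
            new PublishedConfiguration("core-site", entries.entrySet());

        PublishedConfigurationOutputter xml = PublishedConfigurationOutputter
            .createOutputter(ConfigFormat.HADOOP_XML, published);
        System.out.println(xml.asString());       // Hadoop <configuration> XML
        xml.save(new File("/tmp/core-site.xml")); // same content, on disk

        PublishedConfigurationOutputter yaml = PublishedConfigurationOutputter
            .createOutputter(ConfigFormat.YAML, published);
        System.out.println(yaml.asString());      // block-style YAML of the entries
      }
    }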

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SerializedApplicationReport.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SerializedApplicationReport.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SerializedApplicationReport.java
new file mode 100644
index 0000000..405f690
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SerializedApplicationReport.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import java.io.IOException;
+
+/**
+ * Serialized form of an application report which can be persisted
+ * and then parsed. It cannot be converted back into a
+ * real YARN application report
+ * 
+ * Useful for testing
+ */
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+
+public class SerializedApplicationReport {
+
+  public String applicationId;
+  public String applicationAttemptId;
+  public String name;
+  public String applicationType;
+  public String user;
+  public String queue;
+  public String host;
+  public Integer rpcPort;
+  public String state;
+  public String diagnostics;
+  public String url;
+  /**
+   * This value is non-null only when a report is generated from a submission context.
+   * The YARN {@link ApplicationReport} structure does not propagate this value
+   * from the RM.
+   */
+  public Long submitTime;
+  public Long startTime;
+  public Long finishTime;
+  public String finalStatus;
+  public String origTrackingUrl;
+  public Float progress;
+  
+  public SerializedApplicationReport() {
+  }
+  
+  public SerializedApplicationReport(ApplicationReport report) {
+    this.applicationId = report.getApplicationId().toString();
+    ApplicationAttemptId attemptId = report.getCurrentApplicationAttemptId();
+    this.applicationAttemptId = attemptId != null ? attemptId.toString() : "N/A";
+    this.name = report.getName();
+    this.applicationType = report.getApplicationType();
+    this.user = report.getUser();
+    this.queue = report.getQueue();
+    this.host = report.getHost();
+    this.rpcPort = report.getRpcPort();
+    this.state = report.getYarnApplicationState().toString();
+    this.diagnostics = report.getDiagnostics();
+    this.startTime = report.getStartTime();
+    this.finishTime = report.getFinishTime();
+    FinalApplicationStatus appStatus = report.getFinalApplicationStatus();
+    this.finalStatus = appStatus == null ? "" : appStatus.toString();
+    this.progress = report.getProgress();
+    this.url = report.getTrackingUrl();
+    this.origTrackingUrl = report.getOriginalTrackingUrl();
+  }
+
+  @Override
+  public String toString() {
+    try {
+      return ApplicationReportSerDeser.toString(this);
+    } catch (IOException e) {
+      return super.toString();
+    }
+  }
+}
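
Since the fields are public and the type is Jackson-friendly, a report can be
assembled directly in a test. A sketch (the application id is made up):

    import org.apache.hadoop.yarn.service.utils.SerializedApplicationReport;

    public class ReportDemo {
      public static void main(String[] args) {
        SerializedApplicationReport report = new SerializedApplicationReport();
        report.applicationId = "application_1505174225740_0001"; // hypothetical
        report.name = "sleeper-service";
        report.state = "RUNNING";
        report.startTime = 1505174230000L;
        // toString() delegates to ApplicationReportSerDeser, printing the
        // non-null fields as JSON per the @JsonSerialize inclusion policy.
        System.out.println(report);
      }
    }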

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceApiUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceApiUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceApiUtil.java
index c87c3b4..21cb049 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceApiUtil.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceApiUtil.java
@@ -25,20 +25,16 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.registry.client.api.RegistryConstants;
 import org.apache.hadoop.registry.client.binding.RegistryUtils;
 import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Artifact;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.api.resource.Configuration;
-import org.apache.slider.api.resource.Resource;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.persist.JsonSerDeser;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.Configuration;
+import org.apache.hadoop.yarn.service.api.records.Resource;
 import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
 import org.apache.hadoop.yarn.service.provider.ProviderFactory;
-import org.apache.slider.server.servicemonitor.MonitorUtils;
-import org.apache.slider.server.services.utility.PatternValidator;
-import org.apache.slider.util.RestApiConstants;
-import org.apache.slider.util.RestApiErrorMessages;
+import org.apache.hadoop.yarn.service.servicemonitor.probe.MonitorUtils;
+import org.apache.hadoop.yarn.service.conf.RestApiConstants;
+import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages;
 import org.codehaus.jackson.map.PropertyNamingStrategy;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -127,9 +123,7 @@ public class ServiceApiUtil {
     List<Component> componentsToAdd = new ArrayList<>();
     for (Component comp : application.getComponents()) {
       int maxCompLength = RegistryConstants.MAX_FQDN_LABEL_LENGTH;
-      if (comp.getUniqueComponentSupport()) {
-        maxCompLength = maxCompLength - Long.toString(Long.MAX_VALUE).length();
-      }
+      maxCompLength = maxCompLength - Long.toString(Long.MAX_VALUE).length();
       if (dnsEnabled && comp.getName().length() > maxCompLength) {
         throw new IllegalArgumentException(String.format(RestApiErrorMessages
             .ERROR_COMPONENT_NAME_INVALID, maxCompLength, comp.getName()));
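
The behavioral change in this hunk: the Long.MAX_VALUE suffix budget is now
reserved for every component, not only those with unique-component support.
The arithmetic, assuming MAX_FQDN_LABEL_LENGTH is the usual 63-character DNS
label limit:

    public class ComponentNameBudget {
      public static void main(String[] args) {
        int maxFqdnLabel = 63; // assumed RegistryConstants.MAX_FQDN_LABEL_LENGTH
        // Room is reserved for a numeric instance suffix up to Long.MAX_VALUE.
        int suffix = Long.toString(Long.MAX_VALUE).length(); // 19
        System.out.println("Max component name: " + (maxFqdnLabel - suffix)); // 44
      }
    }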

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceRegistryUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceRegistryUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceRegistryUtils.java
new file mode 100644
index 0000000..7440b11
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceRegistryUtils.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import org.apache.hadoop.registry.client.binding.RegistryUtils;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+
+
+public class ServiceRegistryUtils {
+
+  /**
+   * Base path for services
+   */
+  public static final String ZK_SERVICES = "services";
+
+  /**
+   * Base path for all Slider references
+   */
+  public static final String ZK_SLIDER = "slider";
+  public static final String ZK_USERS = "users";
+  public static final String SVC_SLIDER = "/" + ZK_SERVICES + "/" + ZK_SLIDER;
+  public static final String SVC_SLIDER_USERS = SVC_SLIDER + "/" + ZK_USERS;
+
+  /**
+   * Get the registry path for an instance under the user's home node
+   * @param instanceName application instance
+   * @return a path to the registry location for this application instance.
+   */
+  public static String registryPathForInstance(String instanceName) {
+    return RegistryUtils.servicePath(
+        RegistryUtils.currentUser(), YarnServiceConstants.APP_TYPE, instanceName
+    );
+  }
+
+  /**
+   * Build the path to a cluster; exists once the cluster has come up.
+   * Even before that, a ZK watcher could wait for it.
+   * @param username user
+   * @param clustername name of the cluster
+   * @return a string
+   */
+  public static String mkClusterPath(String username, String clustername) {
+    return mkSliderUserPath(username) + "/" + clustername;
+  }
+
+  /**
+   * Build the path to a user's Slider root; cluster paths are
+   * created directly under it.
+   * @param username user
+   * @return a string
+   */
+  public static String mkSliderUserPath(String username) {
+    return SVC_SLIDER_USERS + "/" + username;
+  }
+}
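
The path builders are plain string concatenation over the constants above;
for example:

    import org.apache.hadoop.yarn.service.utils.ServiceRegistryUtils;

    public class RegistryPathsDemo {
      public static void main(String[] args) {
        System.out.println(ServiceRegistryUtils.mkSliderUserPath("alice"));
        // /services/slider/users/alice
        System.out.println(ServiceRegistryUtils.mkClusterPath("alice", "hbase1"));
        // /services/slider/users/alice/hbase1
      }
    }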

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d44876af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderFileSystem.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderFileSystem.java
new file mode 100644
index 0000000..d6d664e
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderFileSystem.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+
+/**
+ * Extends CoreFileSystem with operations to manipulate ClusterDescription
+ * persistent state.
+ */
+public class SliderFileSystem extends CoreFileSystem {
+
+  Path appDir = null;
+
+  public SliderFileSystem(FileSystem fileSystem,
+      Configuration configuration) {
+    super(fileSystem, configuration);
+  }
+
+  public SliderFileSystem(Configuration configuration) throws IOException {
+    super(configuration);
+  }
+
+  public void setAppDir(Path appDir) {
+    this.appDir = appDir;
+  }
+
+  public Path getAppDir() {
+    return this.appDir;
+  }
+}
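
A minimal sketch of the new accessor pair; the app dir path is a placeholder,
and the single-argument constructor is assumed to bind to the default
filesystem via CoreFileSystem.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.yarn.service.utils.SliderFileSystem;

    public class SliderFileSystemDemo {
      public static void main(String[] args) throws Exception {
        SliderFileSystem sfs = new SliderFileSystem(new Configuration());
        sfs.setAppDir(new Path("/user/alice/.slider/app1")); // placeholder
        System.out.println(sfs.getAppDir());
      }
    }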

