Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2017/09/20 04:53:34 UTC

[46/84] [abbrv] hadoop git commit: YARN-7113. Clean up packaging and dependencies for yarn-native-services. Contributed by Billie Rinaldi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
index 6e6f4dd..7e53d18 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
@@ -24,35 +24,17 @@ import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.GlobFilter;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.io.nativeio.NativeIO;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.service.client.params.Arguments;
 import org.apache.hadoop.yarn.service.client.params.SliderActions;
 import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
 import org.apache.hadoop.yarn.service.containerlaunch.ClasspathConstructor;
 import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException;
-import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
-import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
-import org.apache.hadoop.yarn.service.exceptions.LauncherExitCodes;
 import org.apache.hadoop.yarn.service.exceptions.SliderException;
-import org.apache.zookeeper.server.util.KerberosUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -63,32 +45,19 @@ import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.FilenameFilter;
 import java.io.IOException;
-import java.io.Serializable;
-import java.net.InetSocketAddress;
 import java.net.ServerSocket;
-import java.net.Socket;
 import java.net.URL;
 import java.net.URLDecoder;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.regex.Pattern;
 import java.util.zip.GZIPOutputStream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
 
 /**
  * These are slider-specific Util methods
@@ -97,41 +66,6 @@ public final class SliderUtils {
 
   private static final Logger log = LoggerFactory.getLogger(SliderUtils.class);
 
-  /**
-   * Atomic bool to track whether or not process security has already been
-   * turned on (prevents re-entrancy)
-   */
-  private static final AtomicBoolean processSecurityAlreadyInitialized =
-      new AtomicBoolean(false);
-  public static final String JAVA_SECURITY_KRB5_REALM =
-      "java.security.krb5.realm";
-  public static final String JAVA_SECURITY_KRB5_KDC = "java.security.krb5.kdc";
-
-  /**
-   * Winutils
-   */
-  public static final String WINUTILS = "WINUTILS.EXE";
-  /**
-   * name of openssl program
-   */
-  public static final String OPENSSL = "openssl";
-
-  /**
-   * name of python program
-   */
-  public static final String PYTHON = "python";
-
-  /**
-   * type of docker standalone service
-   */
-  public static final String DOCKER = "docker";
-  /**
-   * type of docker on yarn service
-   */
-  public static final String DOCKER_YARN = "yarn_docker";
-
-  public static final int NODE_LIST_LIMIT = 10;
-
   private SliderUtils() {
   }
 
@@ -153,96 +87,6 @@ public final class SliderUtils {
   }
 
   /**
-   * Probe for a collection existing and not being empty
-   * @param l collection
-   * @return true if the reference is valid and it contains entries
-   */
-
-  public static boolean isNotEmpty(Collection l) {
-    return l != null && !l.isEmpty();
-  }
-
-  /**
-   * Probe for a map existing and not being empty
-   * @param m map
-   * @return true if the reference is valid and it contains map entries
-   */
-  public static boolean isNotEmpty(Map m) {
-    return m != null && !m.isEmpty();
-  }
-  
-  /*
-   * Validates whether num is an integer
-   * @param num
-   * @param msg the message to be shown in exception
-   */
-  @SuppressWarnings("ResultOfMethodCallIgnored")
-  private static void validateNumber(String num, String msg) throws
-      BadConfigException {
-    try {
-      Integer.parseInt(num);
-    } catch (NumberFormatException nfe) {
-      throw new BadConfigException(msg + num);
-    }
-  }
-
-  /*
-   * Translates the trailing JVM heapsize unit: g, G, m, M
-   * This assumes designated unit of 'm'
-   * @param heapsize
-   * @return heapsize in MB
-   */
-  public static String translateTrailingHeapUnit(String heapsize) throws
-      BadConfigException {
-    String errMsg = "Bad heapsize: ";
-    if (heapsize.endsWith("m") || heapsize.endsWith("M")) {
-      String num = heapsize.substring(0, heapsize.length() - 1);
-      validateNumber(num, errMsg);
-      return num;
-    }
-    if (heapsize.endsWith("g") || heapsize.endsWith("G")) {
-      String num = heapsize.substring(0, heapsize.length() - 1) + "000";
-      validateNumber(num, errMsg);
-      return num;
-    }
-    // check if specified heap size is a number
-    validateNumber(heapsize, errMsg);
-    return heapsize;
-  }
-
-  /**
-   * recursive directory delete
-   * @param dir dir to delete
-   * @throws IOException on any problem
-   */
-  public static void deleteDirectoryTree(File dir) throws IOException {
-    if (dir.exists()) {
-      if (dir.isDirectory()) {
-        log.info("Cleaning up {}", dir);
-        //delete the children
-        File[] files = dir.listFiles();
-        if (files == null) {
-          throw new IOException("listfiles() failed for " + dir);
-        }
-        for (File file : files) {
-          log.info("deleting {}", file);
-          if (!file.delete()) {
-            log.warn("Unable to delete " + file);
-          }
-        }
-        if (!dir.delete()) {
-          log.warn("Unable to delete " + dir);
-        }
-      } else {
-        throw new IOException("Not a directory " + dir);
-      }
-    } else {
-      //not found, do nothing
-      log.debug("No output dir yet");
-    }
-  }
-
-  /**
    * Find a containing JAR
    * @param clazz class to find
    * @return the file
@@ -298,127 +142,6 @@ public final class SliderUtils {
     return null;
   }
 
-  public static void checkPort(String hostname, int port, int connectTimeout)
-      throws IOException {
-    InetSocketAddress addr = new InetSocketAddress(hostname, port);
-    checkPort(hostname, addr, connectTimeout);
-  }
-
-  @SuppressWarnings("SocketOpenedButNotSafelyClosed")
-  public static void checkPort(String name,
-      InetSocketAddress address,
-      int connectTimeout)
-      throws IOException {
-    try(Socket socket = new Socket()) {
-      socket.connect(address, connectTimeout);
-    } catch (Exception e) {
-      throw new IOException("Failed to connect to " + name
-                            + " at " + address
-                            + " after " + connectTimeout + "milliseconds"
-                            + ": " + e,
-          e);
-    }
-  }
-
-  public static void checkURL(String name, String url, int timeout) throws
-      IOException {
-    InetSocketAddress address = NetUtils.createSocketAddr(url);
-    checkPort(name, address, timeout);
-  }
-
-  /**
-   * A required file
-   * @param role role of the file (for errors)
-   * @param filename the filename
-   * @throws ExitUtil.ExitException if the file is missing
-   * @return the file
-   */
-  public static File requiredFile(String filename, String role) throws
-      IOException {
-    if (filename.isEmpty()) {
-      throw new ExitUtil.ExitException(-1, role + " file not defined");
-    }
-    File file = new File(filename);
-    if (!file.exists()) {
-      throw new ExitUtil.ExitException(-1,
-          role + " file not found: " +
-          file.getCanonicalPath());
-    }
-    return file;
-  }
-
-  private static final PatternValidator clusternamePattern
-      = new PatternValidator("[a-z][a-z0-9_-]*");
-
-  /**
-   * Normalize a cluster name then verify that it is valid
-   * @param name proposed cluster name
-   * @return true iff it is valid
-   */
-  public static boolean isClusternameValid(String name) {
-    return name != null && clusternamePattern.matches(name);
-  }
-
-  /**
-   * Copy a directory to a new FS -both paths must be qualified. If
-   * a directory needs to be created, supplied permissions can override
-   * the default values. Existing directories are not touched
-   * @param conf conf file
-   * @param srcDirPath src dir
-   * @param destDirPath dest dir
-   * @param permission permission for the dest directory; null means "default"
-   * @return # of files copies
-   */
-  @SuppressWarnings("deprecation")
-  public static int copyDirectory(Configuration conf,
-      Path srcDirPath,
-      Path destDirPath,
-      FsPermission permission) throws
-      IOException,
-      BadClusterStateException {
-    FileSystem srcFS = FileSystem.get(srcDirPath.toUri(), conf);
-    FileSystem destFS = FileSystem.get(destDirPath.toUri(), conf);
-    //list all paths in the src.
-    if (!srcFS.exists(srcDirPath)) {
-      throw new FileNotFoundException("Source dir not found " + srcDirPath);
-    }
-    if (!srcFS.isDirectory(srcDirPath)) {
-      throw new FileNotFoundException(
-          "Source dir not a directory " + srcDirPath);
-    }
-    GlobFilter dotFilter = new GlobFilter("[!.]*");
-    FileStatus[] entries = srcFS.listStatus(srcDirPath, dotFilter);
-    int srcFileCount = entries.length;
-    if (srcFileCount == 0) {
-      return 0;
-    }
-    if (permission == null) {
-      permission = FsPermission.getDirDefault();
-    }
-    if (!destFS.exists(destDirPath)) {
-      new SliderFileSystem(destFS, conf).createWithPermissions(destDirPath,
-          permission);
-    }
-    Path[] sourcePaths = new Path[srcFileCount];
-    for (int i = 0; i < srcFileCount; i++) {
-      FileStatus e = entries[i];
-      Path srcFile = e.getPath();
-      if (srcFS.isDirectory(srcFile)) {
-        String msg = "Configuration dir " + srcDirPath
-                     + " contains a directory " + srcFile;
-        log.warn(msg);
-        throw new IOException(msg);
-      }
-      log.debug("copying src conf file {}", srcFile);
-      sourcePaths[i] = srcFile;
-    }
-    log.debug("Copying {} files from {} to dest {}", srcFileCount,
-        srcDirPath,
-        destDirPath);
-    FileUtil.copy(srcFS, sourcePaths, destFS, destDirPath, false, true, conf);
-    return srcFileCount;
-  }
-
   /**
    * Copy a file to a new FS -both paths must be qualified.
    * @param conf conf file
@@ -497,19 +220,6 @@ public final class SliderUtils {
 
   /**
    * Join an array of strings with a separator that appears after every
-   * instance in the list -including at the end
-   * @param collection strings
-   * @param separator separator string
-   * @return the joined entries
-   */
-  public static String join(String[] collection, String separator) {
-    return join(collection, separator, true);
-
-
-  }
-
-  /**
-   * Join an array of strings with a separator that appears after every
    * instance in the list -optionally at the end
    * @param collection strings
    * @param separator separator string
@@ -522,30 +232,6 @@ public final class SliderUtils {
   }
 
   /**
-   * Join an array of strings with a separator that appears after every
-   * instance in the list -except at the end
-   * @param collection strings
-   * @param separator separator string
-   * @return the list
-   */
-  public static String joinWithInnerSeparator(String separator,
-      Object... collection) {
-    StringBuilder b = new StringBuilder();
-    boolean first = true;
-
-    for (Object o : collection) {
-      if (first) {
-        first = false;
-      } else {
-        b.append(separator);
-      }
-      b.append(o.toString());
-      b.append(separator);
-    }
-    return b.toString();
-  }
-
-  /**
    * Resolve a mandatory environment variable
    * @param key env var
    * @return the resolved value
@@ -560,170 +246,6 @@ public final class SliderUtils {
     return v;
   }
 
-  public static String appReportToString(ApplicationReport r,
-      String separator) {
-    StringBuilder builder = new StringBuilder(512);
-    builder.append("service ")
-           .append(
-               r.getName())
-           .append("/")
-           .append(r.getApplicationType())
-           .append(separator);
-    Set<String> tags = r.getApplicationTags();
-    if (!tags.isEmpty()) {
-      for (String tag : tags) {
-        builder.append(tag).append(separator);
-      }
-    }
-    DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm");
-    dateFormat.setTimeZone(TimeZone.getDefault());
-    builder.append("state: ").append(r.getYarnApplicationState());
-    String trackingUrl = r.getTrackingUrl();
-    if (isSet(trackingUrl)) {
-      builder.append(separator).append("URL: ").append(trackingUrl);
-    }
-    builder.append(separator)
-           .append("Started: ")
-           .append(dateFormat.format(new Date(r.getStartTime())));
-    long finishTime = r.getFinishTime();
-    if (finishTime > 0) {
-      builder.append(separator)
-             .append("Finished: ")
-             .append(dateFormat.format(new Date(finishTime)));
-    }
-    String rpcHost = r.getHost();
-    if (!isSet(rpcHost)) {
-      builder.append(separator)
-             .append("RPC :")
-             .append(rpcHost)
-             .append(':')
-             .append(r.getRpcPort());
-    }
-    String diagnostics = r.getDiagnostics();
-    if (!isSet(diagnostics)) {
-      builder.append(separator).append("Diagnostics :").append(diagnostics);
-    }
-    return builder.toString();
-  }
-
-  /**
-   * Filter a string value given a single filter
-   * 
-   * @param value
-   *          the string value to check
-   * @param filter
-   *          a single string filter
-   * @return return true if value should be trapped, false if it should be let
-   *         through
-   */
-  public static boolean filter(String value, String filter) {
-    return !(StringUtils.isEmpty(filter) || filter.equals(value));
-  }
-
-  /**
-   * Filter a string value given a set of filters
-   * 
-   * @param value
-   *          the string value to check
-   * @param filters
-   *          a set of string filters
-   * @return return true if value should be trapped, false if it should be let
-   *         through
-   */
-  public static boolean filter(String value, Set<String> filters) {
-    return !(filters.isEmpty() || filters.contains(value));
-  }
-
-  /**
-   * Sorts the given list of service reports, most recently started
-   * or finished instance first.
-   *
-   * @param instances list of instances
-   */
-  public static void sortApplicationsByMostRecent(List<ApplicationReport> instances) {
-    Collections.sort(instances, new MostRecentlyStartedOrFinishedFirst());
-  }
-
-  /**
-   * Sorts the given list of service reports
-   * Finished instances are ordered by finished time and running/accepted instances are
-   * ordered by start time
-   * Finally Instance are order by finished instances coming after running instances
-   *
-   * @param instances list of instances
-   */
-  public static void sortApplicationReport(List<ApplicationReport> instances) {
-    if (instances.size() <= 1) {
-      return;
-    }
-    List<ApplicationReport> nonLiveInstance =
-        new ArrayList<>(instances.size());
-    List<ApplicationReport> liveInstance =
-        new ArrayList<>(instances.size());
-
-    for (ApplicationReport report : instances) {
-      if (report.getYarnApplicationState() == YarnApplicationState.RUNNING
-          ||
-          report.getYarnApplicationState() == YarnApplicationState.ACCEPTED) {
-        liveInstance.add(report);
-      } else {
-        nonLiveInstance.add(report);
-      }
-    }
-
-    if (liveInstance.size() > 1) {
-      Collections.sort(liveInstance, new MostRecentlyStartedAppFirst());
-    }
-    if (nonLiveInstance.size() > 1) {
-      Collections.sort(nonLiveInstance, new MostRecentAppFinishFirst());
-    }
-    instances.clear();
-    instances.addAll(liveInstance);
-    instances.addAll(nonLiveInstance);
-  }
-
-  /**
-   * Merge in one map to another -all entries in the second map are
-   * merged into the first -overwriting any duplicate keys.
-   * @param first first map -the updated one.
-   * @param second the map that is merged in
-   * @return the first map
-   */
-  public static Map<String, String> mergeMap(Map<String, String> first,
-      Map<String, String> second) {
-    first.putAll(second);
-    return first;
-  }
-
-  /**
-   * Merge a set of entries into a map. This will take the entryset of
-   * a map, or a Hadoop collection itself
-   * @param dest destination
-   * @param entries entries
-   * @return dest -with the entries merged in
-   */
-  public static Map<String, String> mergeEntries(Map<String, String> dest,
-      Iterable<Map.Entry<String, String>> entries) {
-    for (Map.Entry<String, String> entry : entries) {
-      dest.put(entry.getKey(), entry.getValue());
-    }
-    return dest;
-  }
-
-  /**
-   * Generic map merge logic
-   * @param first first map
-   * @param second second map
-   * @param <T1> key type
-   * @param <T2> value type
-   * @return 'first' merged with the second
-   */
-  public static <T1, T2> Map<T1, T2> mergeMaps(Map<T1, T2> first,
-      Map<T1, T2> second) {
-    first.putAll(second);
-    return first;
-  }
-
   /**
    * Generic map merge logic
    * @param first first map
@@ -763,125 +285,6 @@ public final class SliderUtils {
   }
 
   /**
-   * Parse an int value, replacing it with defval if undefined;
-   * @param errorKey key to use in exceptions
-   * @param defVal default value to use if the key is not in the map
-   * @param min min value or -1 for do not check
-   * @param max max value or -1 for do not check
-   * @return the int value the integer value
-   * @throws BadConfigException if the value could not be parsed
-   */
-  public static int parseAndValidate(String errorKey,
-      String valS,
-      int defVal,
-      int min, int max) throws
-      BadConfigException {
-    if (valS == null) {
-      valS = Integer.toString(defVal);
-    }
-    String trim = valS.trim();
-    int val;
-    try {
-      val = Integer.decode(trim);
-    } catch (NumberFormatException e) {
-      throw new BadConfigException("Failed to parse value of "
-                                   + errorKey + ": \"" + trim + "\"");
-    }
-    if (min >= 0 && val < min) {
-      throw new BadConfigException("Value of "
-                                   + errorKey + ": " + val + ""
-                                   + "is less than the minimum of " + min);
-    }
-    if (max >= 0 && val > max) {
-      throw new BadConfigException("Value of "
-                                   + errorKey + ": " + val + ""
-                                   + "is more than the maximum of " + max);
-    }
-    return val;
-  }
-
-  public static InetSocketAddress getRmAddress(Configuration conf) {
-    return conf.getSocketAddr(YarnConfiguration.RM_ADDRESS,
-        YarnConfiguration.DEFAULT_RM_ADDRESS,
-        YarnConfiguration.DEFAULT_RM_PORT);
-  }
-
-  public static InetSocketAddress getRmSchedulerAddress(Configuration conf) {
-    return conf.getSocketAddr(YarnConfiguration.RM_SCHEDULER_ADDRESS,
-        YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS,
-        YarnConfiguration.DEFAULT_RM_SCHEDULER_PORT);
-  }
-
-  /**
-   * probe to see if the RM scheduler is defined
-   * @param conf config
-   * @return true if the RM scheduler address is set to
-   * something other than 0.0.0.0
-   */
-  public static boolean isRmSchedulerAddressDefined(Configuration conf) {
-    InetSocketAddress address = getRmSchedulerAddress(conf);
-    return isAddressDefined(address);
-  }
-
-  /**
-   * probe to see if the address
-   * @param address network address
-   * @return true if the scheduler address is set to
-   * something other than 0.0.0.0
-   */
-  public static boolean isAddressDefined(InetSocketAddress address) {
-    if (address == null || address.getHostString() == null) {
-      return false;
-    }
-    return !(address.getHostString().equals("0.0.0.0"));
-  }
-
-  public static void setRmAddress(Configuration conf, String rmAddr) {
-    conf.set(YarnConfiguration.RM_ADDRESS, rmAddr);
-  }
-
-  public static void setRmSchedulerAddress(Configuration conf, String rmAddr) {
-    conf.set(YarnConfiguration.RM_SCHEDULER_ADDRESS, rmAddr);
-  }
-
-  public static boolean hasAppFinished(ApplicationReport report) {
-    return report == null ||
-           report.getYarnApplicationState().ordinal() >=
-           YarnApplicationState.FINISHED.ordinal();
-  }
-
-  public static String containerToString(Container container) {
-    if (container == null) {
-      return "null container";
-    }
-    return String.format(Locale.ENGLISH,
-        "ContainerID=%s nodeID=%s http=%s priority=%s resource=%s",
-        container.getId(),
-        container.getNodeId(),
-        container.getNodeHttpAddress(),
-        container.getPriority(),
-        container.getResource());
-  }
-
-  /**
-   * convert an AM report to a string for diagnostics
-   * @param report the report
-   * @return the string value
-   */
-  public static String reportToString(ApplicationReport report) {
-    if (report == null) {
-      return "Null service report";
-    }
-
-    return "App " + report.getName() + "/" + report.getApplicationType() +
-           "# " +
-           report.getApplicationId() + " user " + report.getUser() +
-           " is in state " + report.getYarnApplicationState() +
-           " RPC: " + report.getHost() + ":" + report.getRpcPort() +
-           " URL: " + report.getOriginalTrackingUrl();
-  }
-
-  /**
    * Convert a YARN URL into a string value of a normal URL
    * @param url URL
    * @return string representatin
@@ -949,151 +352,6 @@ public final class SliderUtils {
     return env;
   }
 
-  /**
-   * Apply a set of command line options to a cluster role map
-   * @param clusterRoleMap cluster role map to merge onto
-   * @param commandOptions command opts
-   */
-  public static void applyCommandLineRoleOptsToRoleMap(
-      Map<String, Map<String, String>> clusterRoleMap,
-      Map<String, Map<String, String>> commandOptions) {
-    for (Map.Entry<String, Map<String, String>> entry : commandOptions.entrySet()) {
-      String key = entry.getKey();
-      Map<String, String> optionMap = entry.getValue();
-      Map<String, String> existingMap = clusterRoleMap.get(key);
-      if (existingMap == null) {
-        existingMap = new HashMap<String, String>();
-      }
-      log.debug("Overwriting role options with command line values {}",
-          stringifyMap(optionMap));
-      mergeMap(existingMap, optionMap);
-      //set or overwrite the role
-      clusterRoleMap.put(key, existingMap);
-    }
-  }
-
-  /**
-   * Verify that a Kerberos principal has been set -if not fail
-   * with an error message that actually tells you what is missing
-   * @param conf configuration to look at
-   * @param principal key of principal
-   * @throws BadConfigException if the key is not set
-   */
-  public static void verifyPrincipalSet(Configuration conf,
-      String principal) throws
-      BadConfigException {
-    String principalName = conf.get(principal);
-    if (principalName == null) {
-      throw new BadConfigException("Unset Kerberos principal : %s",
-          principal);
-    }
-    log.debug("Kerberos princial {}={}", principal, principalName);
-  }
-
-  /**
-   * Flag to indicate whether the cluster is in secure mode
-   * @param conf configuration to look at
-   * @return true if the slider client/service should be in secure mode
-   */
-  public static boolean isHadoopClusterSecure(Configuration conf) {
-    return SecurityUtil.getAuthenticationMethod(conf) !=
-           UserGroupInformation.AuthenticationMethod.SIMPLE;
-  }
-
-  /**
-   * Init security if the cluster configuration declares the cluster is secure
-   * @param conf configuration to look at
-   * @return true if the cluster is secure
-   * @throws IOException cluster is secure
-   * @throws SliderException the configuration/process is invalid
-   */
-  public static boolean maybeInitSecurity(Configuration conf) throws
-      IOException,
-      SliderException {
-    boolean clusterSecure = isHadoopClusterSecure(conf);
-    if (clusterSecure) {
-      log.debug("Enabling security");
-      initProcessSecurity(conf);
-    }
-    return clusterSecure;
-  }
-
-  /**
-   * Turn on security. This is setup to only run once.
-   * @param conf configuration to build up security
-   * @return true if security was initialized in this call
-   * @throws IOException IO/Net problems
-   * @throws BadConfigException the configuration and system state are inconsistent
-   */
-  public static boolean initProcessSecurity(Configuration conf) throws
-      IOException,
-      SliderException {
-
-    if (processSecurityAlreadyInitialized.compareAndSet(true, true)) {
-      //security is already inited
-      return false;
-    }
-
-    log.info("JVM initialized into secure mode with kerberos realm {}",
-        SliderUtils.getKerberosRealm());
-    //this gets UGI to reset its previous world view (i.e simple auth)
-    //security
-    log.debug("java.security.krb5.realm={}",
-        System.getProperty(JAVA_SECURITY_KRB5_REALM, ""));
-    log.debug("java.security.krb5.kdc={}",
-        System.getProperty(JAVA_SECURITY_KRB5_KDC, ""));
-    log.debug("hadoop.security.authentication={}",
-        conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION));
-    log.debug("hadoop.security.authorization={}",
-        conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION));
-    UserGroupInformation.setConfiguration(conf);
-    UserGroupInformation authUser = UserGroupInformation.getCurrentUser();
-    log.debug("Authenticating as {}", authUser);
-    log.debug("Login user is {}", UserGroupInformation.getLoginUser());
-    if (!UserGroupInformation.isSecurityEnabled()) {
-      throw new SliderException(LauncherExitCodes.EXIT_UNAUTHORIZED,
-          "Although secure mode is enabled," +
-         "the service has already set up its user as an insecure entity %s",
-          authUser);
-    }
-    if (authUser.getAuthenticationMethod() ==
-        UserGroupInformation.AuthenticationMethod.SIMPLE) {
-      throw new BadConfigException("Auth User is not Kerberized %s" +
-         " -security has already been set up with the wrong authentication method. "
-         + "This can occur if a file system has already been created prior to the loading of "
-         + "the security configuration.",
-          authUser);
-
-    }
-
-    SliderUtils.verifyPrincipalSet(conf, YarnConfiguration.RM_PRINCIPAL);
-    SliderUtils.verifyPrincipalSet(conf, "dfs.namenode.kerberos.principal");
-    return true;
-  }
-
-  /**
-   * Force an early login: This catches any auth problems early rather than
-   * in RPC operations
-   * @throws IOException if the login fails
-   */
-  public static void forceLogin() throws IOException {
-    if (UserGroupInformation.isSecurityEnabled()) {
-      if (UserGroupInformation.isLoginKeytabBased()) {
-        UserGroupInformation.getLoginUser().reloginFromKeytab();
-      } else {
-        UserGroupInformation.getLoginUser().reloginFromTicketCache();
-      }
-    }
-  }
-
-  public static String getLibDir() {
-    String[] libDirs = getLibDirs();
-    if (libDirs == null || libDirs.length == 0) {
-      return null;
-    }
-    return libDirs[0];
-  }
-
   public static String[] getLibDirs() {
     String libDirStr = System.getProperty(YarnServiceConstants.PROPERTY_LIB_DIR);
     if (isUnset(libDirStr)) {
@@ -1183,39 +441,6 @@ public final class SliderUtils {
     sliderFileSystem.submitTarGzipAndUpdate(providerResources);
   }
 
-  public static Map<String, Map<String, String>> deepClone(Map<String, Map<String, String>> src) {
-    Map<String, Map<String, String>> dest = new HashMap<>();
-    for (Map.Entry<String, Map<String, String>> entry : src.entrySet()) {
-      dest.put(entry.getKey(), stringMapClone(entry.getValue()));
-    }
-    return dest;
-  }
-
-  public static Map<String, String> stringMapClone(Map<String, String> src) {
-    Map<String, String> dest = new HashMap<>();
-    return mergeEntries(dest, src.entrySet());
-  }
-
-  /**
-   * List a directory in the local filesystem
-   * @param dir directory
-   * @return a listing, one to a line
-   */
-  public static String listDir(File dir) {
-    if (dir == null) {
-      return "";
-    }
-    String[] confDirEntries = dir.list();
-    if (confDirEntries == null) {
-      return "";
-    }
-    StringBuilder builder = new StringBuilder();
-    for (String entry : confDirEntries) {
-      builder.append(entry).append("\n");
-    }
-    return builder.toString();
-  }
-
   /**
    * Create a file:// path from a local file
    * @param file file to point the path
@@ -1225,16 +450,6 @@ public final class SliderUtils {
     return new Path(file.toURI());
   }
 
-  public static String getKerberosRealm() {
-    try {
-      return KerberosUtil.getDefaultRealm();
-    } catch (Exception e) {
-      log.debug("introspection into JVM internals failed", e);
-      return "(unknown)";
-
-    }
-  }
-
   /**
    * Build up the classpath for execution
    * -behaves very differently on a mini test cluster vs a production
@@ -1279,171 +494,6 @@ public final class SliderUtils {
   }
 
   /**
-   * Verify that a path refers to a directory. If not
-   * logs the parent dir then throws an exception
-   * @param dir the directory
-   * @param errorlog log for output on an error
-   * @throws FileNotFoundException if it is not a directory
-   */
-  public static void verifyIsDir(File dir, Logger errorlog) throws
-      FileNotFoundException {
-    if (!dir.exists()) {
-      errorlog.warn("contents of {}: {}", dir,
-          listDir(dir.getParentFile()));
-      throw new FileNotFoundException(dir.toString());
-    }
-    if (!dir.isDirectory()) {
-      errorlog.info("contents of {}: {}", dir,
-          listDir(dir.getParentFile()));
-      throw new FileNotFoundException(
-          "Not a directory: " + dir);
-    }
-  }
-
-  /**
-   * Verify that a file exists
-   * @param file file
-   * @param errorlog log for output on an error
-   * @throws FileNotFoundException
-   */
-  public static void verifyFileExists(File file, Logger errorlog) throws
-      FileNotFoundException {
-    if (!file.exists()) {
-      errorlog.warn("contents of {}: {}", file,
-          listDir(file.getParentFile()));
-      throw new FileNotFoundException(file.toString());
-    }
-    if (!file.isFile()) {
-      throw new FileNotFoundException("Not a file: " + file.toString());
-    }
-  }
-
-  /**
-   * verify that a config option is set
-   * @param configuration config
-   * @param key key
-   * @return the value, in case it needs to be verified too
-   * @throws BadConfigException if the key is missing
-   */
-  public static String verifyOptionSet(Configuration configuration, String key,
-      boolean allowEmpty) throws BadConfigException {
-    String val = configuration.get(key);
-    if (val == null) {
-      throw new BadConfigException(
-          "Required configuration option \"%s\" not defined ", key);
-    }
-    if (!allowEmpty && val.isEmpty()) {
-      throw new BadConfigException(
-          "Configuration option \"%s\" must not be empty", key);
-    }
-    return val;
-  }
-
-  /**
-   * Verify that a keytab property is defined and refers to a non-empty file
-   *
-   * @param siteConf configuration
-   * @param prop property to look for
-   * @return the file referenced
-   * @throws BadConfigException on a failure
-   */
-  public static File verifyKeytabExists(Configuration siteConf,
-      String prop) throws
-      BadConfigException {
-    String keytab = siteConf.get(prop);
-    if (keytab == null) {
-      throw new BadConfigException("Missing keytab property %s",
-          prop);
-
-    }
-    File keytabFile = new File(keytab);
-    if (!keytabFile.exists()) {
-      throw new BadConfigException("Missing keytab file %s defined in %s",
-          keytabFile,
-          prop);
-    }
-    if (keytabFile.length() == 0 || !keytabFile.isFile()) {
-      throw new BadConfigException("Invalid keytab file %s defined in %s",
-          keytabFile,
-          prop);
-    }
-    return keytabFile;
-  }
-
-  /**
-   * Add a subpath to an existing URL. This extends
-   * the path, inserting a / between all entries
-   * if needed.
-   * @param base base path/URL
-   * @param path subpath
-   * @return base+"/"+subpath
-   */
-  public static String appendToURL(String base, String path) {
-    StringBuilder fullpath = new StringBuilder(base);
-    if (!base.endsWith("/")) {
-      fullpath.append("/");
-    }
-    if (path.startsWith("/")) {
-      fullpath.append(path.substring(1));
-    } else {
-      fullpath.append(path);
-    }
-    return fullpath.toString();
-  }
-
-  /**
-   * Truncate the given string to a maximum length provided
-   * with a pad (...) added to the end if expected size if more than 10.
-   * @param toTruncate string to truncate; may be null
-   * @param maxSize maximum size
-   * @return the truncated/padded string. 
-   */
-  public static String truncate(String toTruncate, int maxSize) {
-    if (toTruncate == null || maxSize < 1
-        || toTruncate.length() <= maxSize) {
-      return toTruncate;
-    }
-
-    String pad = "...";
-    if (maxSize < 10) {
-      pad = "";
-    }
-    return toTruncate.substring(0, maxSize - pad.length()).concat(pad);
-  }
-
-  /**
-   * Given a source folder create zipped file
-   *
-   * @param srcFolder
-   * @param zipFile
-   *
-   * @throws IOException
-   */
-  public static void zipFolder(File srcFolder, File zipFile) throws IOException {
-    log.info("Zipping folder {} to {}", srcFolder.getAbsolutePath(), zipFile.getAbsolutePath());
-    List<String> files = new ArrayList<>();
-    generateFileList(files, srcFolder, srcFolder, true);
-
-    byte[] buffer = new byte[1024];
-
-    try (FileOutputStream fos = new FileOutputStream(zipFile)) {
-      try (ZipOutputStream zos = new ZipOutputStream(fos)) {
-
-        for (String file : files) {
-          ZipEntry ze = new ZipEntry(file);
-          zos.putNextEntry(ze);
-          try (FileInputStream in = new FileInputStream(srcFolder + File.separator + file)) {
-            int len;
-            while ((len = in.read(buffer)) > 0) {
-              zos.write(buffer, 0, len);
-            }
-          }
-        }
-      }
-    }
-  }
-
-  /**
    * Given a source folder create a tar.gz file
    * 
    * @param libDirs
@@ -1479,11 +529,6 @@ public final class SliderUtils {
   }
 
   private static void generateFileList(List<String> fileList, File node,
-      File rootFolder, Boolean relative) {
-    generateFileList(fileList, node, rootFolder, relative, null);
-  }
-
-  private static void generateFileList(List<String> fileList, File node,
       File rootFolder, Boolean relative, FilenameFilter filter) {
     if (node.isFile()) {
       String fileFullPath = node.toString();
@@ -1507,134 +552,6 @@ public final class SliderUtils {
     }
   }
 
-  /**
-   * Check for any needed libraries being present. On Unix none are needed;
-   * on windows they must be present
-   * @return true if all is well
-   */
-  public static String checkForRequiredNativeLibraries() {
-
-    if (!Shell.WINDOWS) {
-      return "";
-    }
-    StringBuilder errorText = new StringBuilder("");
-    if (!NativeIO.isAvailable()) {
-      errorText.append("No native IO library. ");
-    }
-    try {
-      String path = Shell.getQualifiedBinPath(WINUTILS);
-      log.debug("winutils is at {}", path);
-    } catch (IOException e) {
-      errorText.append("No " + WINUTILS);
-      log.warn("No winutils: {}", e, e);
-    }
-    try {
-      File target = new File("target");
-      FileUtil.canRead(target);
-    } catch (UnsatisfiedLinkError e) {
-      log.warn("Failing to link to native IO methods: {}", e, e);
-      errorText.append("No native IO methods");
-    }
-    return errorText.toString();
-  }
-
-  /**
-   * Strictly verify that windows utils is present.
-   * Checks go as far as opening the file and looking for
-   * the headers. 
-   * @throws IOException on any problem reading the file
-   * @throws FileNotFoundException if the file is not considered valid
-   */
-  public static void maybeVerifyWinUtilsValid() throws
-      IOException,
-      SliderException {
-    String errorText = SliderUtils.checkForRequiredNativeLibraries();
-    if (!errorText.isEmpty()) {
-      throw new BadClusterStateException(errorText);
-    }
-  }
-
-  /**
-   * Write bytes to a file
-   * @param outfile output file
-   * @param data data to write
-   * @throws IOException on any IO problem
-   */
-  public static void write(File outfile, byte[] data)
-      throws IOException {
-    File parentDir = outfile.getCanonicalFile().getParentFile();
-    if (parentDir == null) {
-      throw new IOException(outfile.getPath() + " has no parent dir");
-    }
-    if (!parentDir.exists()) {
-      if(!parentDir.mkdirs()) {
-        throw new IOException("Failed to create parent directory " + parentDir);
-      }
-    }
-    SliderUtils.verifyIsDir(parentDir, log);
-    try(FileOutputStream out = new FileOutputStream(outfile)) {
-      out.write(data);
-    }
-  }
-
-  /**
-   * Compare the times of two applications: most recent app comes first
-   * Specifically: the one whose start time value is greater.
-   */
-  private static class MostRecentlyStartedAppFirst
-      implements Comparator<ApplicationReport>, Serializable {
-    @Override
-    public int compare(ApplicationReport r1, ApplicationReport r2) {
-      long x = r1.getStartTime();
-      long y = r2.getStartTime();
-      return compareTwoLongsReverse(x, y);
-    }
-  }
-  
-  /**
-   * Compare the times of two applications: most recent app comes first.
-   * "Recent"== the app whose start time <i>or finish time</i> is the greatest.
-   */
-  private static class MostRecentlyStartedOrFinishedFirst
-      implements Comparator<ApplicationReport>, Serializable {
-    @Override
-    public int compare(ApplicationReport r1, ApplicationReport r2) {
-      long started1 = r1.getStartTime();
-      long started2 = r2.getStartTime();
-      long finished1 = r1.getFinishTime();
-      long finished2 = r2.getFinishTime();
-      long lastEvent1 = Math.max(started1, finished1);
-      long lastEvent2 = Math.max(started2, finished2);
-      return compareTwoLongsReverse(lastEvent1, lastEvent2);
-    }
-  }
-
-  /**
-   * Compare the times of two applications: most recently finished app comes first
-   * Specifically: the one whose finish time value is greater.
-   */
-  private static class MostRecentAppFinishFirst
-      implements Comparator<ApplicationReport>, Serializable {
-    @Override
-    public int compare(ApplicationReport r1, ApplicationReport r2) {
-      long x = r1.getFinishTime();
-      long y = r2.getFinishTime();
-      return compareTwoLongsReverse(x, y);
-    }
-  }
-
-  /**
-   * Compare two long values for sorting. As the return value for 
-   * comparators must be int, the simple value of <code>x-y</code>
-   * is inapplicable
-   * @param x x value
-   * @param y y value
-   * @return +ve if x is less than y, -ve if y is greater than x; 0 for equality
-   */
-  public static int compareTwoLongsReverse(long x, long y) {
-    return (x < y) ? 1 : ((x == y) ? 0 : -1);
-  }
-
   public static String createNameTag(String name) {
     return "Name: " + name;
   }
@@ -1646,9 +563,4 @@ public final class SliderUtils {
   public static String createDescriptionTag(String description) {
     return "Description: " + description;
   }
-
-  public static final String DAYS = ".days";
-  public static final String HOURS = ".hours";
-  public static final String MINUTES = ".minutes";
-  public static final String SECONDS = ".seconds";
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
index 0f4f598..a2edbc8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
@@ -18,12 +18,22 @@
 
 package org.apache.hadoop.yarn.service;
 
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.service.api.records.Service;
 import org.apache.hadoop.yarn.service.api.records.Component;
 import org.apache.hadoop.yarn.service.api.records.Resource;
 import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
+import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 import org.codehaus.jackson.map.PropertyNamingStrategy;
 
+import java.io.IOException;
+
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 public class ServiceTestUtils {
 
   public static final JsonSerDeser<Service> JSON_SER_DESER =
@@ -56,4 +66,22 @@ public class ServiceTestUtils {
     resource.setCpus(1);
     return comp1;
   }
+
+  public static SliderFileSystem initMockFs() throws IOException {
+    return initMockFs(null);
+  }
+
+  public static SliderFileSystem initMockFs(Service ext) throws IOException {
+    SliderFileSystem sfs = mock(SliderFileSystem.class);
+    FileSystem mockFs = mock(FileSystem.class);
+    JsonSerDeser<Service> jsonSerDeser = mock(JsonSerDeser.class);
+    when(sfs.getFileSystem()).thenReturn(mockFs);
+    when(sfs.buildClusterDirPath(anyObject())).thenReturn(
+        new Path("cluster_dir_path"));
+    if (ext != null) {
+      when(jsonSerDeser.load(anyObject(), anyObject())).thenReturn(ext);
+    }
+    ServiceApiUtil.setJsonSerDeser(jsonSerDeser);
+    return sfs;
+  }
 }
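
For context, a minimal usage sketch of the new Mockito-based initMockFs helper; it assumes the createValidApplication helper already defined in TestServiceApiUtil (see the hunks below) and is not itself part of this commit:

    // Sketch only (not part of this patch): the helper replaces per-test
    // EasyMock setup with shared Mockito stubs for SliderFileSystem,
    // FileSystem and JsonSerDeser.
    Service app = createValidApplication("comp1");         // existing test helper
    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
    ServiceApiUtil.validateAndResolveService(app, sfs, new YarnConfiguration());

    // When a test needs an external service definition resolved from the mock
    // filesystem, pass it in and the stubbed JsonSerDeser.load() returns it:
    Service ext = createValidApplication("comp1");
    SliderFileSystem sfsExt = ServiceTestUtils.initMockFs(ext);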

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java
index be36335..959e4d6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java
@@ -18,8 +18,6 @@
 package org.apache.hadoop.yarn.service;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.registry.client.api.RegistryConstants;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.service.api.records.Service;
@@ -27,7 +25,6 @@ import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages;
 import org.apache.hadoop.yarn.service.api.records.Artifact;
 import org.apache.hadoop.yarn.service.api.records.Component;
 import org.apache.hadoop.yarn.service.api.records.Resource;
-import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
 import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 import org.junit.Assert;
@@ -44,10 +41,8 @@ import java.util.List;
 import static org.apache.hadoop.yarn.service.conf.RestApiConstants.DEFAULT_COMPONENT_NAME;
 import static org.apache.hadoop.yarn.service.conf.RestApiConstants.DEFAULT_UNLIMITED_LIFETIME;
 import static org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages.*;
-import static org.easymock.EasyMock.*;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
 
 /**
  * Test for ServiceApiUtil helper methods.
@@ -78,7 +73,7 @@ public class TestServiceApiUtil {
     assertEquals(RegistryConstants.MAX_FQDN_LABEL_LENGTH + 1, LEN_64_STR
         .length());
 
-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
 
     Service app = new Service();
 
@@ -230,7 +225,7 @@ public class TestServiceApiUtil {
 
   @Test
   public void testArtifacts() throws IOException {
-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
 
     Service app = new Service();
     app.setName("name");
@@ -309,27 +304,10 @@ public class TestServiceApiUtil {
     return app;
   }
 
-  private static SliderFileSystem initMock(Service ext) throws IOException {
-    SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-    FileSystem mockFs = createNiceMock(FileSystem.class);
-    JsonSerDeser<Service> jsonSerDeser = createNiceMock(JsonSerDeser
-        .class);
-    expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-    expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-        new Path("cluster_dir_path")).anyTimes();
-    if (ext != null) {
-      expect(jsonSerDeser.load(anyObject(), anyObject())).andReturn(ext)
-          .anyTimes();
-    }
-    replay(sfs, mockFs, jsonSerDeser);
-    ServiceApiUtil.setJsonSerDeser(jsonSerDeser);
-    return sfs;
-  }
-
   @Test
   public void testExternalApplication() throws IOException {
     Service ext = createValidApplication("comp1");
-    SliderFileSystem sfs = initMock(ext);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext);
 
     Service app = createValidApplication(null);
 
@@ -350,7 +328,7 @@ public class TestServiceApiUtil {
 
   @Test
   public void testDuplicateComponents() throws IOException {
-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
 
     String compName = "comp1";
     Service app = createValidApplication(compName);
@@ -368,7 +346,7 @@ public class TestServiceApiUtil {
   @Test
   public void testExternalDuplicateComponent() throws IOException {
     Service ext = createValidApplication("comp1");
-    SliderFileSystem sfs = initMock(ext);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext);
 
     Service app = createValidApplication("comp1");
     Artifact artifact = new Artifact();
@@ -387,7 +365,7 @@ public class TestServiceApiUtil {
   @Test
   public void testExternalComponent() throws IOException {
     Service ext = createValidApplication("comp1");
-    SliderFileSystem sfs = initMock(ext);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext);
 
     Service app = createValidApplication("comp2");
     Artifact artifact = new Artifact();
@@ -454,7 +432,7 @@ public class TestServiceApiUtil {
               e)), ex.getMessage());
     }
 
-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
     Service service = createValidApplication(null);
     service.setComponents(Arrays.asList(c, d, e));
     try {
@@ -470,7 +448,7 @@ public class TestServiceApiUtil {
 
   @Test
   public void testInvalidComponent() throws IOException {
-    SliderFileSystem sfs = initMock(null);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
     testComponent(sfs);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java
index 63aa9c6..30f2aeb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.yarn.service;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.curator.test.TestingCluster;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -51,6 +49,8 @@ import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -78,8 +78,8 @@ import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.YARN_SERVICE_B
  */
 public class TestYarnNativeServices extends ServiceTestUtils{
 
-  private static final Log LOG =
-      LogFactory.getLog(TestYarnNativeServices.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestYarnNativeServices.class);
 
   private MiniYARNCluster yarnCluster = null;
   private MiniDFSCluster hdfsCluster = null;
@@ -416,7 +416,7 @@ public class TestYarnNativeServices extends ServiceTestUtils{
         LOG.info("Num Components " + retrievedApp.getComponents().size());
         for (Component component : retrievedApp.getComponents()) {
           LOG.info("looking for  " + component.getName());
-          LOG.info(component);
+          LOG.info(component.toString());
           if (component.getContainers() != null) {
             if (component.getContainers().size() == exampleApp
                 .getComponent(component.getName()).getNumberOfContainers()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java
index ecc529d..c53ee2b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java
@@ -45,7 +45,6 @@ import static org.apache.hadoop.yarn.conf.YarnConfiguration.RESOURCEMANAGER_CONN
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_MS;
 import static org.apache.hadoop.yarn.service.client.params.Arguments.ARG_APPDEF;
 import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.YARN_SERVICE_BASE_PATH;
-import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.*;
 
 public class TestServiceCLI {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java
index 04ec526..8739382 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java
@@ -18,13 +18,11 @@
 
 package org.apache.hadoop.yarn.service.conf;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.service.ServiceTestUtils;
 import org.apache.hadoop.yarn.service.api.records.Service;
 import org.apache.hadoop.yarn.service.api.records.ConfigFile;
 import org.apache.hadoop.yarn.service.api.records.Configuration;
-import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
 import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 import org.junit.Assert;
@@ -40,7 +38,6 @@ import java.util.Map;
 import java.util.Set;
 
 import static org.apache.hadoop.yarn.service.conf.ExampleAppJson.*;
-import static org.easymock.EasyMock.*;
 
 /**
  * Test global configuration resolution.
@@ -78,12 +75,7 @@ public class TestAppJsonResolve extends Assert {
     assertEquals("1000", worker.getProperty("timeout"));
 
     // here is the resolution
-    SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-    FileSystem mockFs = createNiceMock(FileSystem.class);
-    expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-    expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-        new Path("cluster_dir_path")).anyTimes();
-    replay(sfs, mockFs);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
     ServiceApiUtil.validateAndResolveService(orig, sfs, new
         YarnConfiguration());
 
@@ -162,27 +154,13 @@ public class TestAppJsonResolve extends Assert {
     assertEquals(0, other.getProperties().size());
 
     // load the external service
-    SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-    FileSystem mockFs = createNiceMock(FileSystem.class);
-    expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-    expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-        new Path("cluster_dir_path")).anyTimes();
-    replay(sfs, mockFs);
+    SliderFileSystem sfs = ServiceTestUtils.initMockFs();
     Service ext = ExampleAppJson.loadResource(APP_JSON);
     ServiceApiUtil.validateAndResolveService(ext, sfs, new
         YarnConfiguration());
-    reset(sfs, mockFs);
 
     // perform the resolution on original service
-    JsonSerDeser<Service> jsonSerDeser = createNiceMock(JsonSerDeser
-        .class);
-    expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-    expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-        new Path("cluster_dir_path")).anyTimes();
-    expect(jsonSerDeser.load(anyObject(), anyObject())).andReturn(ext)
-        .anyTimes();
-    replay(sfs, mockFs, jsonSerDeser);
-    ServiceApiUtil.setJsonSerDeser(jsonSerDeser);
+    sfs = ServiceTestUtils.initMockFs(ext);
     ServiceApiUtil.validateAndResolveService(orig, sfs, new
         YarnConfiguration());
 

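The hunks above replace the hand-rolled EasyMock setup (createNiceMock/expect/replay on SliderFileSystem, FileSystem and JsonSerDeser) with calls to ServiceTestUtils.initMockFs(). The helper's body is not shown in this patch, so the following is only a sketch of what a Mockito-based equivalent might look like, inferred from the expectations it replaces:

    // Sketch only: inferred from the removed EasyMock expectations, not the
    // actual ServiceTestUtils implementation.
    package org.apache.hadoop.yarn.service;

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.yarn.service.api.records.Service;
    import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
    import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
    import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
    import static org.mockito.Matchers.anyObject;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    public final class ServiceTestUtilsSketch {

      public static SliderFileSystem initMockFs() throws IOException {
        return initMockFs(null);
      }

      @SuppressWarnings("unchecked")
      public static SliderFileSystem initMockFs(Service ext) throws IOException {
        SliderFileSystem sfs = mock(SliderFileSystem.class);
        FileSystem mockFs = mock(FileSystem.class);
        when(sfs.getFileSystem()).thenReturn(mockFs);
        when(sfs.buildClusterDirPath(anyObject()))
            .thenReturn(new Path("cluster_dir_path"));
        if (ext != null) {
          // Serve the external service definition instead of reading it from HDFS.
          JsonSerDeser<Service> jsonSerDeser = mock(JsonSerDeser.class);
          when(jsonSerDeser.load(anyObject(), anyObject())).thenReturn(ext);
          ServiceApiUtil.setJsonSerDeser(jsonSerDeser);
        }
        return sfs;
      }
    }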
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
index 83e9502..a813da3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
@@ -18,9 +18,8 @@
 
 package org.apache.hadoop.yarn.service.conf;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.service.ServiceTestUtils;
 import org.apache.hadoop.yarn.service.api.records.Service;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
 import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
@@ -33,7 +32,6 @@ import java.util.Arrays;
 import java.util.Collection;
 
 import static org.apache.hadoop.yarn.service.ServiceTestUtils.JSON_SER_DESER;
-import static org.easymock.EasyMock.*;
 
 /**
  * Test loading example resources.
@@ -62,12 +60,7 @@ public class TestLoadExampleAppJson extends Assert {
     try {
       Service service = JSON_SER_DESER.fromResource(resource);
 
-      SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-      FileSystem mockFs = createNiceMock(FileSystem.class);
-      expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-      expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-          new Path("cluster_dir_path")).anyTimes();
-      replay(sfs, mockFs);
+      SliderFileSystem sfs = ServiceTestUtils.initMockFs();
 
       ServiceApiUtil.validateAndResolveService(service, sfs,
           new YarnConfiguration());

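For reference, a hypothetical test method (not part of this patch) showing the shape the example-loading tests take once the mock setup is centralized in ServiceTestUtils.initMockFs(); the names mirror the surrounding test code:

    @Test
    public void verifyExampleResolves() throws Throwable {
      Service service = JSON_SER_DESER.fromResource(resource);
      SliderFileSystem sfs = ServiceTestUtils.initMockFs();
      ServiceApiUtil.validateAndResolveService(service, sfs,
          new YarnConfiguration());
    }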
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java
index 5b24a1d..79406e9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.yarn.service.providers;
 
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.service.api.records.Artifact;
 import org.apache.hadoop.yarn.service.api.records.ConfigFile;
 import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
@@ -29,7 +28,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.easymock.EasyMock.*;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * Test the AbstractClientProvider shared methods.
@@ -55,9 +56,8 @@ public class TestAbstractClientProvider {
   @Test
   public void testConfigFiles() throws IOException {
     ClientProvider clientProvider = new ClientProvider();
-    FileSystem mockFs = createNiceMock(FileSystem.class);
-    expect(mockFs.exists(anyObject(Path.class))).andReturn(true).anyTimes();
-    replay(mockFs);
+    FileSystem mockFs = mock(FileSystem.class);
+    when(mockFs.exists(anyObject())).thenReturn(true);
 
     ConfigFile configFile = new ConfigFile();
     List<ConfigFile> configFiles = new ArrayList<>();

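The hunk above shows the general EasyMock-to-Mockito translation applied throughout this patch: createNiceMock becomes mock (unstubbed calls on a Mockito mock already return defaults, like a nice mock), expect(...).andReturn(...).anyTimes() becomes when(...).thenReturn(...) (a Mockito stub applies to every matching call), and the replay() step disappears because Mockito mocks are usable immediately. A minimal restatement of the stubbing from the hunk:

    import static org.mockito.Matchers.anyObject;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    FileSystem mockFs = mock(FileSystem.class);
    when(mockFs.exists(anyObject())).thenReturn(true);  // applies to all calls; no replay() needed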
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/pom.xml b/hadoop-yarn-project/pom.xml
index 3cbbaa7..1b3c5f0 100644
--- a/hadoop-yarn-project/pom.xml
+++ b/hadoop-yarn-project/pom.xml
@@ -82,6 +82,10 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-router</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-services-core</artifactId>
+    </dependency>
   </dependencies>
 
   <build>

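The new hadoop-yarn-services-core dependency is declared without a <version> element, so Maven must resolve its version from a dependencyManagement entry in a parent POM (presumably hadoop-project/pom.xml, updated elsewhere in this change set). A sketch of the kind of managed entry this relies on, shown only as an assumption for illustration:

    <!-- Assumed dependencyManagement entry in the parent POM; not part of this hunk,
         and the exact version property is an assumption. -->
    <dependencyManagement>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-yarn-services-core</artifactId>
          <version>${project.version}</version>
        </dependency>
      </dependencies>
    </dependencyManagement>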

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org