Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2017/09/05 05:10:39 UTC

[15/51] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/publisher/PublisherResource.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/publisher/PublisherResource.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/publisher/PublisherResource.java
deleted file mode 100644
index 3e9b764..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/publisher/PublisherResource.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.web.rest.publisher;
-
-import org.apache.hadoop.yarn.webapp.NotFoundException;
-import org.apache.slider.core.registry.docstore.ConfigFormat;
-import org.apache.slider.core.registry.docstore.PublishedConfigSet;
-import org.apache.slider.core.registry.docstore.PublishedConfiguration;
-import org.apache.slider.core.registry.docstore.PublishedConfigurationOutputter;
-import org.apache.slider.core.registry.docstore.PublishedExports;
-import org.apache.slider.core.registry.docstore.PublishedExportsSet;
-import org.apache.slider.core.registry.docstore.UriMap;
-import org.apache.slider.server.appmaster.state.StateAccessForProviders;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-import org.apache.slider.server.appmaster.web.rest.AbstractSliderResource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.UriInfo;
-import java.io.IOException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.slider.server.appmaster.web.rest.RestPaths.PUBLISHED_CONFIGURATION_REGEXP;
-import static org.apache.slider.server.appmaster.web.rest.RestPaths.PUBLISHED_CONFIGURATION_SET_REGEXP;
-
-/**
- * This publishes configuration sets
- */
-public class PublisherResource extends AbstractSliderResource {
-  protected static final Logger log =
-      LoggerFactory.getLogger(PublisherResource.class);
-  public static final String EXPORTS_NAME = "exports";
-  public static final String EXPORTS_RESOURCES_PATH = "/" + EXPORTS_NAME;
-  public static final String EXPORT_RESOURCE_PATH = EXPORTS_RESOURCES_PATH + "/{exportname}" ;
-  public static final String SET_NAME =
-      "{setname: " + PUBLISHED_CONFIGURATION_SET_REGEXP + "}";
-  public static final String SETNAME = "setname";
-  public static final String CLASSPATH = "/classpath";
-  public static final String CONFIG = "config";
-  
-  public static final String SETNAME_PATTERN = 
-      "{"+ SETNAME+": " + PUBLISHED_CONFIGURATION_SET_REGEXP + "}";
-  private static final String CONFIG_PATTERN =
-      SETNAME_PATTERN + "/{"+ CONFIG +": " + PUBLISHED_CONFIGURATION_REGEXP + "}";
-  private final StateAccessForProviders appState;
-
-  public PublisherResource(WebAppApi slider) {
-    super(slider);
-    appState = slider.getAppState();
-  }
-
-  private void init(HttpServletResponse res, UriInfo uriInfo) {
-    res.setContentType(null);
-    log.debug(uriInfo.getRequestUri().toString());
-  }
- 
-  /**
-   * Get a named config set 
-   * @param setname name of the config set
-   * @return the config set
-   * @throws NotFoundException if there was no matching set
-   */
-  private PublishedConfigSet getConfigSet(String setname) {
-    PublishedConfigSet configSet =
-        appState.getPublishedConfigSet(setname);
-    if (configSet == null) {
-      throw new NotFoundException("Not found: " + setname);
-    }
-    return configSet;
-  }
-
-  @GET
-  @Path("/")
-  @Produces({MediaType.APPLICATION_JSON})
-  public UriMap enumConfigSets(
-      @Context UriInfo uriInfo,
-      @Context HttpServletResponse res) {
-    init(res, uriInfo);
-    String baseURL = uriInfo.getRequestUri().toString();
-    if (!baseURL.endsWith("/")) {
-      baseURL += "/";
-    }
-    UriMap uriMap = new UriMap();
-    for (String name : appState.listConfigSets()) {
-      uriMap.put(name, baseURL + name);
-      log.info("registering config set {} at {}", name, baseURL);
-    }
-    uriMap.put(EXPORTS_NAME, baseURL + EXPORTS_NAME);
-    return uriMap;
-  }
-
-  @GET
-  @Path(CLASSPATH)
-  @Produces({MediaType.APPLICATION_JSON})
-  public List<URL> getAMClassPath() {
-    URL[] urls = ((URLClassLoader) getClass().getClassLoader()).getURLs();
-    return Arrays.asList(urls);
-  }
-
-  @GET
-  @Path(EXPORTS_RESOURCES_PATH)
-  @Produces({MediaType.APPLICATION_JSON})
-  public PublishedExportsSet gePublishedExports() {
-
-    return appState.getPublishedExportsSet();
-  }
-
-  @GET
-  @Path(EXPORT_RESOURCE_PATH)
-  @Produces({MediaType.APPLICATION_JSON})
-  public PublishedExports getAMExports2(@PathParam("exportname") String exportname,
-                              @Context UriInfo uriInfo,
-                              @Context HttpServletResponse res) {
-    init(res, uriInfo);
-    PublishedExportsSet set = appState.getPublishedExportsSet();
-    return set.get(exportname);
-  }
-
-  @GET
-  @Path("/"+ SETNAME_PATTERN)
-  @Produces({MediaType.APPLICATION_JSON})
-  public PublishedConfigSet getPublishedConfiguration(
-      @PathParam(SETNAME) String setname,
-      @Context UriInfo uriInfo,
-      @Context HttpServletResponse res) {
-    init(res, uriInfo);
-
-    logRequest(uriInfo);
-    PublishedConfigSet publishedConfigSet = getConfigSet(setname);
-    log.debug("Number of configurations: {}", publishedConfigSet.size());
-    return publishedConfigSet.shallowCopy();
-  }
-
-  private void logRequest(UriInfo uriInfo) {
-    log.info(uriInfo.getRequestUri().toString());
-  }
-
-  @GET
-  @Path("/" + CONFIG_PATTERN)
-  @Produces({MediaType.APPLICATION_JSON})
-  public PublishedConfiguration getConfigurationInstance(
-      @PathParam(SETNAME) String setname,
-      @PathParam(CONFIG) String config,
-      @Context UriInfo uriInfo,
-      @Context HttpServletResponse res) {
-    init(res, uriInfo);
-
-    PublishedConfiguration publishedConfig =
-        getPublishedConfiguration(setname, config);
-    if (publishedConfig == null) {
-      log.info("Configuration {} not found", config);
-      throw new NotFoundException("Not found: " + uriInfo.getAbsolutePath());
-    }
-    return publishedConfig;
-  }
-
-  /**
-   * Get a configuration
-   * @param setname name of the config set
-   * @param config name of the configuration
-   * @return the configuration, or null if the set exists but contains no entry with that name
-   * @throws NotFoundException if there was no matching set
-   */
-  public PublishedConfiguration getPublishedConfiguration(String setname,
-      String config) {
-    return getConfigSet(setname).get(config);
-  }
-
-  @GET
-  @Path("/" + CONFIG_PATTERN + ".json")
-  @Produces({MediaType.APPLICATION_JSON})
-  public String getConfigurationContentJson(
-      @PathParam(SETNAME) String setname,
-
-      @PathParam(CONFIG) String config,
-      @Context UriInfo uriInfo,
-      @Context HttpServletResponse res) throws IOException {
-    return getStringRepresentation(setname, config, uriInfo, res,
-        ConfigFormat.JSON);
-  }
-
-  @GET
-  @Path("/" + CONFIG_PATTERN + ".xml")
-  @Produces({MediaType.APPLICATION_XML})
-  public String getConfigurationContentXML(
-      @PathParam(SETNAME) String setname,
-      @PathParam(CONFIG) String config,
-      @Context UriInfo uriInfo,
-      @Context HttpServletResponse res) throws IOException {
-    return getStringRepresentation(setname, config, uriInfo, res,
-        ConfigFormat.XML);
-  }
-  
-  @GET
-  @Path("/" + CONFIG_PATTERN + ".properties")
-  @Produces({MediaType.APPLICATION_XML})
-  public String getConfigurationContentProperties(
-      @PathParam(SETNAME) String setname,
-
-      @PathParam(CONFIG) String config,
-      @Context UriInfo uriInfo,
-      @Context HttpServletResponse res) throws IOException {
-
-    return getStringRepresentation(setname, config, uriInfo, res,
-        ConfigFormat.PROPERTIES);
-  }
-
-  public String getStringRepresentation(String setname,
-      String config,
-      UriInfo uriInfo,
-      HttpServletResponse res, ConfigFormat format) throws IOException {
-    // delegate (including init)
-    PublishedConfiguration publishedConfig =
-        getConfigurationInstance(setname, config, uriInfo, res);
-    PublishedConfigurationOutputter outputter =
-        publishedConfig.createOutputter(format);
-    return outputter.asString();
-  }
-
-  @GET
-  @Path("/" + CONFIG_PATTERN +"/{propertyName}")
-  @Produces({MediaType.APPLICATION_JSON})
-  public Map<String,String> getConfigurationProperty(
-      @PathParam(SETNAME) String setname,
-      @PathParam(CONFIG) String config,
-      @PathParam("propertyName") String propertyName,
-      @Context UriInfo uriInfo,
-      @Context HttpServletResponse res) {
-    PublishedConfiguration publishedConfig =
-        getConfigurationInstance(setname, config, uriInfo, res);
-    String propVal = publishedConfig.entries.get(propertyName);
-    if (propVal == null) {
-      log.debug("Configuration property {} not found in configuration {}",
-          propertyName, config);
-      throw new NotFoundException("Property not found: " + propertyName);
-    }
-    Map<String, String> rtnVal = new HashMap<>();
-    rtnVal.put(propertyName, propVal);
-
-    return rtnVal;
-  }
-  
-}
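
For orientation on what the removed PublisherResource exposed: each published configuration was addressable as <setname>/<config>, with a ".json", ".xml" or ".properties" suffix selecting the output format. The sketch below is a purely illustrative, JDK-only client fetch of one configuration as JSON; the publisherBase URL is an assumption, not something defined in this patch.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class PublishedConfigFetcher {

      // publisherBase is hypothetical, e.g. the publisher root of the AM web app.
      public static String fetchConfigJson(String publisherBase, String setName,
          String configName) throws Exception {
        URL url = new URL(publisherBase + "/" + setName + "/" + configName + ".json");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
          StringBuilder body = new StringBuilder();
          String line;
          while ((line = in.readLine()) != null) {
            body.append(line).append('\n');
          }
          return body.toString();
        } finally {
          conn.disconnect();
        }
      }
    }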

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/registry/PathEntryResource.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/registry/PathEntryResource.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/registry/PathEntryResource.java
deleted file mode 100644
index efb09a8..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/registry/PathEntryResource.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.web.rest.registry;
-
-import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-import java.util.List;
-
-/**
- * Representation of a path entry
- */
-@JsonIgnoreProperties(ignoreUnknown = true)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-public class PathEntryResource {
-
-  /**
-   * Child nodes: as the short path to each element
-   */
-  public List<String> nodes;
-
-  /**
-   * Service record: if null, there is no resolvable service
-   * record at this node.
-   */
-  public ServiceRecord service;
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/registry/RegistryResource.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/registry/RegistryResource.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/registry/RegistryResource.java
deleted file mode 100644
index c824848..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/registry/RegistryResource.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.web.rest.registry;
-
-import com.google.inject.Singleton;
-import org.apache.hadoop.fs.PathNotFoundException;
-import org.apache.hadoop.registry.client.api.RegistryOperations;
-import org.apache.hadoop.registry.client.exceptions.AuthenticationFailedException;
-import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
-import org.apache.hadoop.registry.client.exceptions.NoPathPermissionsException;
-import org.apache.hadoop.registry.client.exceptions.NoRecordException;
-import org.apache.hadoop.yarn.webapp.ForbiddenException;
-import org.apache.hadoop.yarn.webapp.NotFoundException;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-import org.apache.slider.server.appmaster.web.rest.AbstractSliderResource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.UriInfo;
-import java.io.IOException;
-
-/**
- * This is the read-only view of the YARN registry.
- * 
- * Model:
- * <ol>
- *   <li>a tree of nodes</li>
- *   <li>Default view is of children + record</li>
- * </ol>
- * 
- */
-@Singleton
-public class RegistryResource extends AbstractSliderResource {
-  protected static final Logger log =
-      LoggerFactory.getLogger(RegistryResource.class);
-  public static final String SERVICE_PATH =
-      "/{path:.*}";
-
-  private final RegistryOperations registry;
-
-  /**
-   * Construct an instance bonded to a registry
-   * @param slider slider API
-   */
-  public RegistryResource(WebAppApi slider) {
-    super(slider);
-    this.registry = slider.getRegistryOperations();
-  }
-
-  
-  /**
-   * Internal init code, per request
-   * @param request incoming request 
-   * @param uriInfo URI details
-   */
-  private void init(HttpServletRequest request, UriInfo uriInfo) {
-    log.debug(uriInfo.getRequestUri().toString());
-  }
-
-  @GET
-  @Produces({MediaType.APPLICATION_JSON})
-  public PathEntryResource getRoot(@Context HttpServletRequest request,
-      @Context UriInfo uriInfo) {
-    return lookup("/", request, uriInfo);
-  }
-
-//   {path:.*}
-
-  @Path(SERVICE_PATH)
-  @GET
-  @Produces({MediaType.APPLICATION_JSON})
-  public PathEntryResource lookup(
-      @PathParam("path") String path,
-      @Context HttpServletRequest request,
-      @Context UriInfo uriInfo) {
-      init(request, uriInfo);
-      return resolvePath(path);
-  }
-
-  /**
-   * Do the actual processing of requests to responses; can be directly
-   * invoked for testing.
-   * @param path path to query
-   * @return the entry
-   * @throws WebApplicationException on any failure.
-   */
-  public PathEntryResource resolvePath(String path) throws
-      WebApplicationException {
-    try {
-      PathEntryResource pathEntry =
-          fromRegistry(path);
-      if (log.isDebugEnabled()) {
-        log.debug("Resolved:\n{}", pathEntry);
-      }
-      return pathEntry;
-   
-    } catch (Exception e) {
-      throw buildException(path, e);
-    }
-  }
-
-
-  /**
-   * Build from the registry, filling up the children and service records.
-   * If there is no service record at the end of the path, that entry is 
-   * null
-   * @param path path to query
-   * @return the built up record
-   * @throws IOException problems
-   *
-   */
-  private PathEntryResource fromRegistry(String path) throws IOException {
-    PathEntryResource entry = new PathEntryResource();
-    try {
-      entry.service = registry.resolve(path);
-    } catch (NoRecordException e) {
-      // ignoring
-      log.debug("No record at {}", path);
-    } catch (InvalidRecordException e) {
-      // swallow the exception: an invalid record is treated as
-      // "no parseable entry present" at this path
-      log.warn("Failed to resolve {}: {}", path, e, e);
-    }
-    entry.nodes = registry.list(path);
-    return entry;
-  }
-}
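
The removed RegistryResource was a thin read-only REST wrapper over the Hadoop YARN registry client API: list the children of a path and resolve its service record, if any. A minimal stand-alone sketch of the same two lookups against RegistryOperations directly, assuming the Configuration already carries the registry (ZooKeeper) settings, might look like this:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.registry.client.api.RegistryOperations;
    import org.apache.hadoop.registry.client.api.RegistryOperationsFactory;
    import org.apache.hadoop.registry.client.exceptions.NoRecordException;
    import org.apache.hadoop.registry.client.types.ServiceRecord;

    import java.util.List;

    public class RegistryLookupSketch {
      public static void main(String[] args) throws Exception {
        String path = args.length > 0 ? args[0] : "/";
        Configuration conf = new Configuration();   // assumed to contain the registry settings
        RegistryOperations registry = RegistryOperationsFactory.createInstance(conf);
        registry.init(conf);   // bring up the client service
        registry.start();
        try {
          List<String> children = registry.list(path);     // names of the child nodes
          System.out.println(path + " -> " + children);
          try {
            ServiceRecord record = registry.resolve(path); // service record, if one is present
            System.out.println("record: " + record);
          } catch (NoRecordException e) {
            System.out.println("no service record at " + path);
          }
        } finally {
          registry.stop();
        }
      }
    }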

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/ClusterSpecificationBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/ClusterSpecificationBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/ClusterSpecificationBlock.java
deleted file mode 100644
index 79b687f..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/ClusterSpecificationBlock.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.server.appmaster.web.view;
-
-import com.google.inject.Inject;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-
-/**
- * Renders the application's JSON specification on the Slider AM web UI.
- */
-public class ClusterSpecificationBlock extends SliderHamletBlock {
-
-  @Inject
-  public ClusterSpecificationBlock(WebAppApi slider) {
-    super(slider);
-  }
-
-  @Override
-  protected void render(Block html) {
-    doRender(html);
-  }
-
-  // An extra method to make testing easier since you can't make an instance of Block
-  protected void doRender(Hamlet html) {
-    html.
-      div("cluster_json").
-        h2("JSON Cluster Specification").
-        pre().
-          _(getJson())._()._();
-  }
-
-  /**
-   * Get the application specification as JSON text
-   * @return the application state rendered as a string
-   */
-  private String getJson() {
-    return appState.getApplication().toString();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/ContainerStatsBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/ContainerStatsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/ContainerStatsBlock.java
deleted file mode 100644
index 4796d6c..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/ContainerStatsBlock.java
+++ /dev/null
@@ -1,275 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.server.appmaster.web.view;
-
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Maps;
-import com.google.inject.Inject;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR;
-import org.apache.slider.api.ClusterNode;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.types.ComponentInformation;
-import org.apache.slider.server.appmaster.state.RoleInstance;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-
-import javax.annotation.Nonnull;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-/**
- * Renders container statistics for each component on the Slider AM web UI.
- */
-public class ContainerStatsBlock extends SliderHamletBlock {
-
-  private static final String EVEN = "even", ODD = "odd", BOLD = "bold", SCHEME = "http://", PATH = "/node/container/";
-
-  // Some functions that help transform the data into an object we can use to abstract presentation specifics
-  protected static final Function<Entry<String,Integer>,Entry<TableContent,Integer>> stringIntPairFunc = toTableContentFunction();
-  protected static final Function<Entry<String,Long>,Entry<TableContent,Long>> stringLongPairFunc = toTableContentFunction();
-  protected static final Function<Entry<String,String>,Entry<TableContent,String>> stringStringPairFunc = toTableContentFunction();
-
-  @Inject
-  public ContainerStatsBlock(WebAppApi slider) {
-    super(slider);
-  }
-
-  /**
-   * Sort a collection of ClusterNodes by name
-   */
-  protected static class ClusterNodeNameComparator implements Comparator<ClusterNode>,
-      Serializable {
-
-    @Override
-    public int compare(ClusterNode node1, ClusterNode node2) {
-      if (null == node1 && null != node2) {
-        return -1;
-      } else if (null != node1 && null == node2) {
-        return 1;
-      } else if (null == node1) {
-        return 0;
-      }
-
-      final String name1 = node1.name, name2 = node2.name;
-      if (null == name1 && null != name2) {
-        return -1;
-      } else if (null != name1 && null == name2) {
-        return 1;
-      } else if (null == name1) {
-        return 0;
-      }
-
-      return name1.compareTo(name2);
-    }
-
-  }
-
-  @Override
-  protected void render(Block html) {
-    final Map<String,RoleInstance> containerInstances = getContainerInstances(
-        appState.cloneOwnedContainerList());
-
-    Map<String, Map<String, ClusterNode>> clusterNodeMap =
-        appState.getRoleClusterNodeMapping();
-    Map<String, ComponentInformation> componentInfoMap = appState.getComponentInfoSnapshot();
-
-    for (Entry<String, Map<String, ClusterNode>> entry : clusterNodeMap.entrySet()) {
-      final String name = entry.getKey();
-      Map<String, ClusterNode> clusterNodesInRole = entry.getValue();
-      //final RoleStatus roleStatus = entry.getValue();
-
-      DIV<Hamlet> div = html.div("role-info ui-widget-content ui-corner-all");
-
-      List<ClusterNode> nodesInRole =
-          new ArrayList<>(clusterNodesInRole.values());
-
-      div.h2(BOLD, StringUtils.capitalize(name));
-
-      // Generate the details on this role
-      ComponentInformation componentInfo = componentInfoMap.get(name);
-      if (componentInfo != null) {
-        Iterable<Entry<String,Integer>> stats = componentInfo.buildStatistics().entrySet();
-        generateRoleDetails(div,"role-stats-wrap", "Specifications", 
-            Iterables.transform(stats, stringIntPairFunc));
-      }
-
-      // Sort the ClusterNodes by their name (containerid)
-      Collections.sort(nodesInRole, new ClusterNodeNameComparator());
-
-      // Generate the containers running this role
-      generateRoleDetails(div, "role-stats-containers", "Containers",
-          Iterables.transform(nodesInRole, new Function<ClusterNode,Entry<TableContent,String>>() {
-
-            @Override
-            public Entry<TableContent,String> apply(ClusterNode input) {
-              final String containerId = input.name;
-              
-              if (containerInstances.containsKey(containerId)) {
-                RoleInstance roleInst = containerInstances.get(containerId);
-                if (roleInst.container.getNodeHttpAddress() != null) {
-                  return Maps.<TableContent,String> immutableEntry(
-                    new TableAnchorContent(containerId,
-                        buildNodeUrlForContainer(roleInst.container.getNodeHttpAddress(), containerId)), null);
-                }
-              }
-              return Maps.immutableEntry(new TableContent(input.name), null);
-            }
-
-          }));
-
-      Application application = appState.getApplication();
-      Iterable<Entry<TableContent, String>> tableContent;
-      tableContent = Collections.emptySet();
-
-      // Generate the options used by this role
-      generateRoleDetails(div, "role-options-wrap", "Role Options", tableContent);
-
-      // Close the div for this role
-      div._();
-    }
-  }
-
-  protected static <T> Function<Entry<String,T>,Entry<TableContent,T>> toTableContentFunction() {
-    return new Function<Entry<String,T>,Entry<TableContent,T>>() {
-      @Override
-      public Entry<TableContent,T> apply(@Nonnull Entry<String,T> input) {
-        return Maps.immutableEntry(new TableContent(input.getKey()), input.getValue());
-      }
-    };
-  }
-
-  protected Map<String,RoleInstance> getContainerInstances(List<RoleInstance> roleInstances) {
-    Map<String,RoleInstance> map = Maps.newHashMapWithExpectedSize(roleInstances.size());
-    for (RoleInstance roleInstance : roleInstances) {
-      // UUID is the containerId
-      map.put(roleInstance.id, roleInstance);
-    }
-    return map;
-  }
-
-  /**
-   * Given a div, a name for this data, and some pairs of data, generate an HTML table. If contents
-   * is empty (of size zero), a message that there were no items is printed instead of an empty table.
-   *
-   */
-  protected <T1 extends TableContent,T2> void generateRoleDetails(DIV<Hamlet> parent, String divSelector, String detailsName, Iterable<Entry<T1,T2>> contents) {
-    final DIV<DIV<Hamlet>> div = parent.div(divSelector).h3(BOLD, detailsName);
-
-    int offset = 0;
-    TABLE<DIV<DIV<Hamlet>>> table = null;
-    TBODY<TABLE<DIV<DIV<Hamlet>>>> tbody = null;
-    for (Entry<T1,T2> content : contents) {
-      if (null == table) {
-        table = div.table("ui-widget-content ui-corner-bottom");
-        tbody = table.tbody();
-      }
-      
-      TR<TBODY<TABLE<DIV<DIV<Hamlet>>>>> row = tbody.tr(offset % 2 == 0 ? EVEN : ODD);
-      
-      // Defer to the implementation of the TableContent for what the cell should contain
-      content.getKey().printCell(row);
-
-      // Only add the second column if the element is non-null
-      // This also lets us avoid making a second method if we're only making a one-column table
-      if (null != content.getValue()) {
-        row.td(content.getValue().toString());
-      }
-
-      row._();
-
-      offset++;
-    }
-
-    // If we made a table, close it out
-    if (null != table) {
-      tbody._()._();
-    } else {
-      // Otherwise, throw in a nice "no content" message
-      div.p("no-table-contents")._("None")._();
-    }
-    
-    // Close out the initial div
-    div._();
-  }
-
-  /**
-   * Build a URL from the address:port and container ID directly to the NodeManager service
-   * @param nodeAddress NodeManager HTTP address in host:port form
-   * @param containerId ID of the container
-   * @return a URL to the container page on that NodeManager
-   */
-  protected String buildNodeUrlForContainer(String nodeAddress, String containerId) {
-    StringBuilder sb = new StringBuilder(SCHEME.length() + nodeAddress.length() + PATH.length() + containerId.length());
-
-    sb.append(SCHEME).append(nodeAddress).append(PATH).append(containerId);
-
-    return sb.toString();
-  }
-
-  /**
-   * Creates a table cell with the provided String as content.
-   */
-  protected static class TableContent {
-    private String cell;
-
-    public TableContent(String cell) {
-      this.cell = cell;
-    }
-
-    public String getCell() {
-      return cell;
-    }
-
-    /**
-     * Adds a td to the given tr. The tr is not closed 
-     * @param tableRow the table row to add the cell to
-     */
-    public void printCell(TR<?> tableRow) {
-      tableRow.td(this.cell);
-    }
-  }
-
-  /**
-   * Creates a table cell with an anchor to the given URL with the provided String as content.
-   */
-  protected static class TableAnchorContent extends TableContent {
-    private String anchorUrl;
-
-    public TableAnchorContent(String cell, String anchorUrl) {
-      super(cell);
-      this.anchorUrl = anchorUrl;
-    }
-
-    /* (non-javadoc)
-     * @see org.apache.slider.server.appmaster.web.view.ContainerStatsBlock$TableContent#printCell()
-     */
-    @Override
-    public void printCell(TR<?> tableRow) {
-      tableRow.td().a(anchorUrl, getCell())._();
-    }
-  }
-}
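
A side note on the hand-rolled null checks in ClusterNodeNameComparator above: the same ordering (null nodes first, then null names, then natural name order) can be written with Java 8 comparator combinators. The sketch below uses a stand-in Node type instead of the real ClusterNode, purely for illustration.

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    public class NullSafeNameOrdering {

      // Stand-in for ClusterNode; only the name field matters for the ordering.
      static final class Node {
        final String name;
        Node(String name) { this.name = name; }
        @Override public String toString() { return "Node(" + name + ")"; }
      }

      public static void main(String[] args) {
        Comparator<Node> byName = Comparator.nullsFirst(
            Comparator.comparing((Node node) -> node.name,
                Comparator.nullsFirst(Comparator.<String>naturalOrder())));

        List<Node> nodes = Arrays.asList(new Node("c2"), null, new Node(null), new Node("c1"));
        nodes.sort(byName);
        System.out.println(nodes);   // [null, Node(null), Node(c1), Node(c2)]
      }
    }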

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/IndexBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/IndexBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/IndexBlock.java
deleted file mode 100644
index c0a120d..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/IndexBlock.java
+++ /dev/null
@@ -1,273 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.server.appmaster.web.view;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.inject.Inject;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.LI;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.UL;
-import org.apache.slider.api.types.ApplicationLivenessInformation;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.registry.docstore.ExportEntry;
-import org.apache.slider.core.registry.docstore.PublishedExports;
-import org.apache.slider.core.registry.docstore.PublishedExportsSet;
-import org.apache.hadoop.yarn.service.metrics.ServiceMetrics;
-import org.apache.slider.server.appmaster.state.RoleStatus;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import static org.apache.slider.server.appmaster.web.rest.RestPaths.LIVE_COMPONENTS;
-
-/**
- * The main content on the Slider AM web page
- */
-public class IndexBlock extends SliderHamletBlock {
-  private static final Logger log = LoggerFactory.getLogger(IndexBlock.class);
-
-  /**
-   * Message printed when application is at full size.
-   *
-   * {@value}
-   */
-  public static final String ALL_CONTAINERS_ALLOCATED = "all containers allocated";
-
-  @Inject
-  public IndexBlock(WebAppApi slider) {
-    super(slider);
-  }
-
-  @Override
-  protected void render(Block html) {
-    doIndex(html, getProviderName());
-  }
-
-  // An extra method to make testing easier since you can't make an instance of Block
-  @VisibleForTesting
-  protected void doIndex(Hamlet html, String providerName) {
-    String name = appState.getApplicationName();
-    if (name != null && (name.startsWith(" ") || name.endsWith(" "))) {
-      name = "'" + name + "'";
-    } 
-    DIV<Hamlet> div = html.div("general_info")
-                          .h1("index_header",
-                              "Application: " + name);
-
-    ApplicationLivenessInformation liveness =
-        appState.getApplicationLivenessInformation();
-    String livestatus = liveness.allRequestsSatisfied
-        ? ALL_CONTAINERS_ALLOCATED
-        : String.format("Awaiting %d containers", liveness.requestsOutstanding);
-    Hamlet.TABLE<DIV<Hamlet>> table1 = div.table();
-    table1.tr()
-          .td("Status")
-          .td(livestatus)
-          ._();
-    table1.tr()
-          .td("Total number of containers")
-          .td(Integer.toString(appState.getNumOwnedContainers()))
-          ._();
-    table1.tr()
-          .td("Create time: ")
-          .td("N/A")
-          ._();
-    table1.tr()
-          .td("Running since: ")
-          .td("N/A")
-          ._();
-    table1.tr()
-          .td("Time last flexed: ")
-          .td("N/A")
-          ._();
-    table1.tr()
-          .td("Application storage path: ")
-          .td("N/A")
-          ._();
-    table1.tr()
-          .td("Application configuration path: ")
-          .td("N/A")
-          ._();
-    table1._();
-    div._();
-    div = null;
-
-    DIV<Hamlet> containers = html.div("container_instances")
-      .h3("Component Instances");
-
-    int aaRoleWithNoSuitableLocations = 0;
-    int aaRoleWithOpenRequest = 0;
-    int roleWithOpenRequest = 0;
-
-    Hamlet.TABLE<DIV<Hamlet>> table = containers.table();
-    Hamlet.TR<Hamlet.THEAD<Hamlet.TABLE<DIV<Hamlet>>>> header = table.thead().tr();
-    trb(header, "Component");
-    trb(header, "Desired");
-    trb(header, "Actual");
-    trb(header, "Outstanding Requests");
-    trb(header, "Failed");
-    trb(header, "Failed to start");
-    trb(header, "Placement");
-    header._()._();  // tr & thead
-
-    List<RoleStatus> roleStatuses =
-        new ArrayList<>(appState.getRoleStatusMap().values());
-    Collections.sort(roleStatuses, new RoleStatus.CompareByName());
-    for (RoleStatus status : roleStatuses) {
-      String roleName = status.getName();
-      String nameUrl = apiPath(LIVE_COMPONENTS) + "/" + roleName;
-      String aatext;
-      if (status.isAntiAffinePlacement()) {
-        boolean aaRequestOutstanding = status.isAARequestOutstanding();
-        int pending = (int)status.getAAPending();
-        aatext = buildAADetails(aaRequestOutstanding, pending);
-        if (SliderUtils.isSet(status.getLabelExpression())) {
-          aatext += " (label: " + status.getLabelExpression() + ")";
-        }
-        if (pending > 0 && !aaRequestOutstanding) {
-          aaRoleWithNoSuitableLocations ++;
-        } else if (aaRequestOutstanding) {
-          aaRoleWithOpenRequest++;
-        }
-      } else {
-        if (SliderUtils.isSet(status.getLabelExpression())) {
-          aatext = "label: " + status.getLabelExpression();
-        } else {
-          aatext = "";
-        }
-        if (status.getRequested() > 0) {
-          roleWithOpenRequest ++;
-        }
-      }
-      ServiceMetrics metrics = status.getComponentMetrics();
-      table.tr()
-        .td().a(nameUrl, roleName)._()
-        .td(String.format("%d", metrics.containersDesired.value()))
-        .td(String.format("%d", metrics.containersRunning.value()))
-        .td(String.format("%d", metrics.containersRequested.value()))
-        .td(String.format("%d", metrics.containersFailed.value()))
-        .td(aatext)
-        ._();
-    }
-
-    // empty row for some more spacing
-    table.tr()._();
-    // close table
-    table._();
-
-    containers._();
-    containers = null;
-
-    // some spacing
-    html.div()._();
-    html.div()._();
-
-    DIV<Hamlet> diagnostics = html.div("diagnostics");
-
-    List<String> statusEntries = new ArrayList<>(0);
-    if (roleWithOpenRequest > 0) {
-      statusEntries.add(String.format("%d %s with requests unsatisfiable by cluster",
-          roleWithOpenRequest, plural(roleWithOpenRequest, "component")));
-    }
-    if (aaRoleWithNoSuitableLocations > 0) {
-      statusEntries.add(String.format("%d anti-affinity %s no suitable nodes in the cluster",
-        aaRoleWithNoSuitableLocations,
-        plural(aaRoleWithNoSuitableLocations, "component has", "components have")));
-    }
-    if (aaRoleWithOpenRequest > 0) {
-      statusEntries.add(String.format("%d anti-affinity %s with requests unsatisfiable by cluster",
-        aaRoleWithOpenRequest,
-        plural(aaRoleWithOpenRequest, "component has", "components have")));
-
-    }
-    if (!statusEntries.isEmpty()) {
-      diagnostics.h3("Diagnostics");
-      Hamlet.TABLE<DIV<Hamlet>> diagnosticsTable = diagnostics.table();
-      for (String entry : statusEntries) {
-        diagnosticsTable.tr().td(entry)._();
-      }
-      diagnosticsTable._();
-    }
-    diagnostics._();
-
-    DIV<Hamlet> provider_info = html.div("provider_info");
-    provider_info.h3(providerName + " information");
-    UL<Hamlet> ul = html.ul();
-    //TODO render app/cluster status
-    ul._();
-    provider_info._();
-
-    DIV<Hamlet> exports = html.div("exports");
-    exports.h3("Exports");
-    ul = html.ul();
-    enumeratePublishedExports(appState.getPublishedExportsSet(), ul);
-    ul._();
-    exports._();
-  }
-
-  @VisibleForTesting
-  String buildAADetails(boolean outstanding, int pending) {
-    return String.format("Anti-affinity:%s %d pending %s",
-      (outstanding ? " 1 active request and" : ""),
-      pending, plural(pending, "request"));
-  }
-
-  private String plural(int n, String singular) {
-    return plural(n, singular, singular + "s");
-  }
-  private String plural(int n, String singular, String plural) {
-    return n == 1 ? singular : plural;
-  }
-
-  private void trb(Hamlet.TR tr,
-      String text) {
-    tr.td().b(text)._();
-  }
-
-  private String getProviderName() {
-    return "docker";
-  }
-
-
-  protected void enumeratePublishedExports(PublishedExportsSet exports, UL<Hamlet> ul) {
-    for(String key : exports.keys()) {
-      PublishedExports export = exports.get(key);
-      LI<UL<Hamlet>> item = ul.li();
-      item.span().$class("bold")._(export.description)._();
-      UL sublist = item.ul();
-      for (Entry<String, Set<ExportEntry>> entry : export.sortedEntries()
-          .entrySet()) {
-        if (SliderUtils.isNotEmpty(entry.getValue())) {
-          LI sublistItem = sublist.li()._(entry.getKey());
-          for (ExportEntry exportEntry : entry.getValue()) {
-            sublistItem._(exportEntry.getValue());
-          }
-          sublistItem._();
-        }
-      }
-      sublist._();
-      item._();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/NavBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/NavBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/NavBlock.java
deleted file mode 100644
index 069d386..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/NavBlock.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.server.appmaster.web.view;
-
-import com.google.inject.Inject;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-
-import static org.apache.slider.server.appmaster.web.SliderAMWebApp.*;
-import static org.apache.slider.server.appmaster.web.rest.RestPaths.*;
-
-/**
- * Navigation block for the Slider AM web UI.
- */
-public class NavBlock extends SliderHamletBlock {
-
-  @Inject
-  public NavBlock(WebAppApi slider) {
-    super(slider);
-  }
-
-  @Override
-  protected void render(Block html) {
-    html.
-      div("#nav").
-        h3("Slider").
-        ul().
-          li().a(this.prefix(), "Overview")._().
-          li().a(relPath(CONTAINER_STATS), "Statistics")._().
-          li().a(relPath(CLUSTER_SPEC), "Specification")._().
-          li().a(rootPath(SYSTEM_METRICS_JSON), "Metrics")._().
-          li().a(rootPath(SYSTEM_HEALTHCHECK), "Health")._().
-          li().a(rootPath(SYSTEM_THREADS), "Threads")._().
-        _()
-    .h3("REST API"). 
-        ul().
-          li().a(apiPath(MODEL_DESIRED), "Specified")._().
-          li().a(apiPath(MODEL_RESOLVED), "Resolved")._().
-          li().a(apiPath(LIVE_RESOURCES), "Resources")._().
-          li().a(apiPath(LIVE_COMPONENTS), "Components")._().
-          li().a(apiPath(LIVE_CONTAINERS), "Containers")._().
-          li().a(apiPath(LIVE_NODES), "Nodes")._().
-          li().a(apiPath(LIVE_STATISTICS), "Statistics")._().
-          li().a(apiPath(LIVE_LIVENESS), "Liveness")._()
-        ._()
-      ._();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/SliderHamletBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/SliderHamletBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/SliderHamletBlock.java
deleted file mode 100644
index 5f44bda..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/view/SliderHamletBlock.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.web.view;
-
-import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import org.apache.slider.server.appmaster.state.StateAccessForProviders;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-import org.apache.slider.server.appmaster.web.rest.RestPaths;
-
-import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
-import static org.apache.slider.server.appmaster.web.rest.RestPaths.SLIDER_PATH_APPLICATION;
-
-/**
- * Anything we want to share across slider hamlet blocks
- */
-public abstract class SliderHamletBlock extends HtmlBlock  {
-
-  protected final StateAccessForProviders appState;
-  protected final RestPaths restPaths = new RestPaths();
-  
-  public SliderHamletBlock(WebAppApi slider) {
-    this.appState = slider.getAppState();
-  }
-
-  protected String rootPath(String absolutePath) {
-    return root_url(absolutePath);
-  }
-
-  protected String relPath(String... args) {
-    return ujoin(this.prefix(), args);
-  }
-
-  protected String apiPath(String api) {
-    return root_url(SLIDER_PATH_APPLICATION,  api);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/LoadedRoleHistory.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/LoadedRoleHistory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/LoadedRoleHistory.java
deleted file mode 100644
index 77408a5..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/LoadedRoleHistory.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.avro;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.slider.common.tools.SliderUtils;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * A role history loaded from a persisted Avro history file.
- */
-public class LoadedRoleHistory {
-
-  private RoleHistoryHeader header;
-
-  private Path path;
-
-  public final Map<String, Integer> roleMap = new HashMap<>();
-
-  public final List<NodeEntryRecord> records = new ArrayList<>();
-
-  /**
-   * Add a record
-   * @param record node entry record to add
-   */
-  public void add(NodeEntryRecord record) {
-    records.add(record);
-  }
-
-  /**
-   * Number of loaded records
-   * @return the number of loaded records
-   */
-  public int size() {
-    return records.size();
-  }
-
-  public RoleHistoryHeader getHeader() {
-    return header;
-  }
-
-  public void setHeader(RoleHistoryHeader header) {
-    this.header = header;
-  }
-
-  public Path getPath() {
-    return path;
-  }
-
-  public void setPath(Path path) {
-    this.path = path;
-  }
-
-  public void buildMapping(Map<CharSequence, Integer> source) {
-    roleMap.clear();
-    for (Map.Entry<CharSequence, Integer> entry : source.entrySet()) {
-      roleMap.put(SliderUtils.sequenceToString(entry.getKey()),
-          entry.getValue());
-    }
-  }
-
-  @Override
-  public String toString() {
-    final StringBuilder sb = new StringBuilder(
-      "LoadedRoleHistory{");
-    sb.append("path=").append(path);
-    sb.append("; number of roles=").append(roleMap.size());
-    sb.append("; size=").append(size());
-    sb.append('}');
-    return sb.toString();
-  }
-}
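
For reference, buildMapping() above re-keys the role map that Avro deserialization produces (CharSequence keys, typically org.apache.avro.util.Utf8) with plain String keys. A small sketch of that conversion, using made-up role names and without depending on the removed class:

    import org.apache.avro.util.Utf8;

    import java.util.HashMap;
    import java.util.Map;

    public class RoleMapSketch {
      public static void main(String[] args) {
        // Avro deserialization hands back CharSequence (Utf8) keys.
        Map<CharSequence, Integer> avroRoles = new HashMap<>();
        avroRoles.put(new Utf8("worker"), 1);
        avroRoles.put(new Utf8("master"), 2);

        // The effect of buildMapping(): copy the entries under plain String keys.
        Map<String, Integer> roleMap = new HashMap<>();
        for (Map.Entry<CharSequence, Integer> entry : avroRoles.entrySet()) {
          roleMap.put(entry.getKey().toString(), entry.getValue());
        }
        System.out.println(roleMap);   // both roles, now keyed by String
      }
    }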

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/NewerFilesFirst.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/NewerFilesFirst.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/NewerFilesFirst.java
deleted file mode 100644
index 2e049cb..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/NewerFilesFirst.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.avro;
-
-import org.apache.hadoop.fs.Path;
-
-import java.io.Serializable;
-import java.util.Comparator;
-
-/**
- * Compare two filenames by name; the more recent one comes first
- */
-public class NewerFilesFirst implements Comparator<Path>, Serializable {
-
-  /**
-   * Takes the ordering of path names from the normal string comparison
-   * and negates it, so that names that come after other names in 
-   * the string sort come before here
-   * @param o1 leftmost 
-   * @param o2 rightmost
-   * @return positive if o1 &gt; o2 
-   */
-  @Override
-  public int compare(Path o1, Path o2) {
-    return (o2.getName().compareTo(o1.getName()));
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/OlderFilesFirst.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/OlderFilesFirst.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/OlderFilesFirst.java
deleted file mode 100644
index 407aaa6..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/OlderFilesFirst.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.avro;
-
-import org.apache.hadoop.fs.Path;
-
-import java.io.Serializable;
-import java.util.Comparator;
-
-/**
- * Compare two filenames by name; the older one comes first
- */
-public class OlderFilesFirst implements Comparator<Path>, Serializable {
-
-  /**
-   * Uses the natural string ordering of the path names, so that names
-   * which sort earlier lexicographically (older files) come first.
-   * @param o1 first path
-   * @param o2 second path
-   * @return positive if o1's name sorts after o2's name
-   */
-  @Override
-  public int compare(Path o1, Path o2) {
-    return (o1.getName().compareTo(o2.getName()));
-  }
-}

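
For orientation, a minimal usage sketch of the two comparators removed above. It assumes the deleted classes are still on the classpath; the history file names are invented purely for illustration:

import org.apache.hadoop.fs.Path;
import org.apache.slider.server.avro.NewerFilesFirst;
import org.apache.slider.server.avro.OlderFilesFirst;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class HistoryOrderingSketch {
  public static void main(String[] args) {
    List<Path> paths = new ArrayList<>();
    paths.add(new Path("/history/rolehistory-0000000000000002.json"));
    paths.add(new Path("/history/rolehistory-0000000000000001.json"));
    paths.add(new Path("/history/rolehistory-0000000000000003.json"));

    // Newest (lexicographically last) name first: the order used when loading.
    Collections.sort(paths, new NewerFilesFirst());
    System.out.println(paths.get(0).getName()); // ...0003.json

    // Oldest name first: the order used when purging old snapshots.
    Collections.sort(paths, new OlderFilesFirst());
    System.out.println(paths.get(0).getName()); // ...0001.json
  }
}

NewerFilesFirst simply reverses the comparison that OlderFilesFirst applies.
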
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/RoleHistoryWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/RoleHistoryWriter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/RoleHistoryWriter.java
deleted file mode 100644
index 52553d0..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/avro/RoleHistoryWriter.java
+++ /dev/null
@@ -1,449 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.avro;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.avro.AvroTypeException;
-import org.apache.avro.Schema;
-import org.apache.avro.io.DatumReader;
-import org.apache.avro.io.DatumWriter;
-import org.apache.avro.io.Decoder;
-import org.apache.avro.io.DecoderFactory;
-import org.apache.avro.io.Encoder;
-import org.apache.avro.io.EncoderFactory;
-import org.apache.avro.specific.SpecificDatumReader;
-import org.apache.avro.specific.SpecificDatumWriter;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.GlobFilter;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.exceptions.BadConfigException;
-import org.apache.slider.server.appmaster.state.NodeEntry;
-import org.apache.slider.server.appmaster.state.NodeInstance;
-import org.apache.slider.server.appmaster.state.RoleHistory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.EOFException;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.ListIterator;
-import java.util.Locale;
-import java.util.Map;
-
-/**
- * Write out the role history to an output stream.
- */
-public class RoleHistoryWriter {
-  protected static final Logger log =
-    LoggerFactory.getLogger(RoleHistoryWriter.class);
-
-  /**
-   * Although Avro is designed to handle some changes, we still keep a version
-   * marker in the file to catch changes that are fundamentally incompatible
-   * at the semantic level -changes that either require a different
-   * parser or are rejected outright.
-   */
-  public static final int ROLE_HISTORY_VERSION = 0x01;
-  
-  /**
-   * Write out the history.
-   * This does not update the history's dirty/savetime fields
-   *
-   * @param out outstream
-   * @param history history
-   * @param savetime time in millis for the save time to go in as a record
-   * @return number of records written
-   * @throws IOException IO failures
-   */
-  public long write(OutputStream out, RoleHistory history, long savetime)
-    throws IOException {
-    try {
-      DatumWriter<RoleHistoryRecord> writer =
-        new SpecificDatumWriter<>(RoleHistoryRecord.class);
-
-      RoleHistoryRecord record = createHeaderRecord(savetime, history);
-      int roles = history.getRoleSize();
-      Schema schema = record.getSchema();
-      Encoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
-      writer.write(record, encoder);
-      // now write the rolemap record
-      writer.write(createRolemapRecord(history), encoder);
-      long count = 0;
-      //now for every role history entry, write out its record
-      Collection<NodeInstance> instances = history.cloneNodemap().values();
-      for (NodeInstance instance : instances) {
-        for (int role = 0; role < roles; role++) {
-          NodeEntry nodeEntry = instance.get(role);
-
-          if (nodeEntry != null) {
-            NodeEntryRecord ner = build(nodeEntry, role, instance.hostname);
-            record = new RoleHistoryRecord(ner);
-            writer.write(record, encoder);
-            count++;
-          }
-        }
-      }
-      // footer
-      RoleHistoryFooter footer = new RoleHistoryFooter();
-      footer.setCount(count);
-      writer.write(new RoleHistoryRecord(footer), encoder);
-      encoder.flush();
-      return count;
-    } finally {
-      out.close();
-    }
-  }
-
-  /**
-   * Create the header record
-   * @param savetime time of save
-   * @param history history
-   * @return a record to place at the head of the file
-   */
-  private RoleHistoryRecord createHeaderRecord(long savetime, RoleHistory history) {
-    RoleHistoryHeader header = new RoleHistoryHeader();
-    header.setVersion(ROLE_HISTORY_VERSION);
-    header.setSaved(savetime);
-    header.setSavedx(Long.toHexString(savetime));
-    header.setSavedate(SliderUtils.toGMTString(savetime));
-    header.setRoles(history.getRoleSize());
-    return new RoleHistoryRecord(header);
-  }
-
-  /**
-   * Create the rolemap record
-   * @param history history
-   * @return a record to insert into the file
-   */
-  private RoleHistoryRecord createRolemapRecord(RoleHistory history) {
-    RoleHistoryMapping entry = new RoleHistoryMapping();
-    Map<CharSequence, Integer> mapping = history.buildMappingForHistoryFile();
-    entry.setRolemap(mapping);
-    return new RoleHistoryRecord(entry);
-  }
-
-  /**
-   * Write the history information to a file
-   *
-   * @param fs filesystem
-   * @param path path
-   * @param overwrite overwrite flag
-   * @param history history
-   * @param savetime time in millis for the save time to go in as a record
-   * @return number of records written
-   * @throws IOException IO failures
-   */
-  public long write(FileSystem fs,
-      Path path,
-      boolean overwrite,
-      RoleHistory history,
-      long savetime)
-      throws IOException {
-    FSDataOutputStream out = fs.create(path, overwrite);
-    return write(out, history, savetime);
-  }
-
-
-  /**
-   * Create the path for a history file.
-   * @param historyPath parent directory for history files
-   * @param time time value embedded in the file name
-   * @return a path whose file name sorts after those created at earlier times
-   */
-  public Path createHistoryFilename(Path historyPath, long time) {
-    String filename = String.format(Locale.ENGLISH,
-                                    SliderKeys.HISTORY_FILENAME_CREATION_PATTERN,
-                                    time);
-    Path path = new Path(historyPath, filename);
-    return path;
-  }
-
-  /**
-   * Build a {@link NodeEntryRecord} from a node entry; include whether
-   * the node is in use and when it was last used.
-   * @param entry node entry
-   * @param role role index
-   * @param hostname name
-   * @return the record
-   */
-  private NodeEntryRecord build(NodeEntry entry, int role, String hostname) {
-    NodeEntryRecord record = new NodeEntryRecord(
-      hostname, role, entry.getLive() > 0, entry.getLastUsed()
-    );
-    return record;
-  }
-
-  /**
-   * Read a history, returning one that is ready to have its onThaw()
-   * method called.
-   * @param in input source
-   * @return the loaded role history
-   * @throws IOException problems
-   */
-  public LoadedRoleHistory read(InputStream in)
-      throws IOException, BadConfigException {
-    try {
-      LoadedRoleHistory loadedRoleHistory = new LoadedRoleHistory();
-      DatumReader<RoleHistoryRecord> reader =
-        new SpecificDatumReader<>(RoleHistoryRecord.class);
-      Decoder decoder =
-        DecoderFactory.get().jsonDecoder(RoleHistoryRecord.getClassSchema(),
-            in);
-
-      //read header : no entry -> EOF
-      RoleHistoryRecord record = reader.read(null, decoder);
-      if (record == null) {
-        throw new IOException("Role History Header not found at start of file.");
-      }
-      Object entry = record.getEntry();
-      if (!(entry instanceof RoleHistoryHeader)) {
-        throw new IOException("Role History Header not found at start of file");
-      }
-      RoleHistoryHeader header = (RoleHistoryHeader) entry;
-      if (header.getVersion() != ROLE_HISTORY_VERSION) {
-        throw new IOException(
-          String.format("Can't read role file version %04x -need %04x",
-          header.getVersion(),
-          ROLE_HISTORY_VERSION));
-      }
-      loadedRoleHistory.setHeader(header);
-      RoleHistoryFooter footer = null;
-      int records = 0;
-      //go through reading data
-      try {
-        while (footer == null) {
-          record = reader.read(null, decoder);
-          if (record == null) {
-            throw new IOException("Null record after " + records + " records");
-          }
-          entry = record.getEntry();
-
-          if (entry instanceof RoleHistoryHeader) {
-            throw new IOException("Duplicate Role History Header found");
-          } else if (entry instanceof RoleHistoryMapping) {
-            // role history mapping entry
-            if (!loadedRoleHistory.roleMap.isEmpty()) {
-              // duplicate role maps are viewed as something to warn over, rather than fail
-              log.warn("Duplicate role map; ignoring");
-            } else {
-              RoleHistoryMapping historyMapping = (RoleHistoryMapping) entry;
-              loadedRoleHistory.buildMapping(historyMapping.getRolemap());
-            }
-          } else if (entry instanceof NodeEntryRecord) {
-            // normal record
-            records++;
-            NodeEntryRecord nodeEntryRecord = (NodeEntryRecord) entry;
-            loadedRoleHistory.add(nodeEntryRecord);
-          } else if (entry instanceof RoleHistoryFooter) {
-            //tail end of the file
-            footer = (RoleHistoryFooter) entry;
-          } else {
-            // this is to handle future versions, such as when rolling back
-            // from a later version of slider
-            log.warn("Discarding unknown record {}", entry);
-          }
-        }
-      } catch (EOFException e) {
-        EOFException ex = new EOFException(
-          "End of file reached after " + records + " records");
-        ex.initCause(e);
-        throw ex;
-      }
-      // at this point there should be no data left.
-      // check by reading and expecting a -1
-      if (in.read() > 0) {
-        // footer is in stream before the last record
-        throw new EOFException(
-          "File footer reached before end of file -after " + records +
-          " records");
-      }
-      if (records != footer.getCount()) {
-        log.warn("mismatch between no of records saved {} and number read {}",
-                 footer.getCount(), records);
-      }
-      return loadedRoleHistory;
-    } finally {
-      in.close();
-    }
-
-  }
-
-  /**
-   * Read a role history from a path in a filesystem
-   * @param fs filesystem
-   * @param path path to the file
-   * @return the records read
-   * @throws IOException any problem
-   */
-  public LoadedRoleHistory read(FileSystem fs, Path path)
-      throws IOException, BadConfigException {
-    FSDataInputStream instream = fs.open(path);
-    return read(instream);
-  }
-
-  /**
-   * Read from a resource in the classpath -used for testing
-   * @param resource resource
-   * @return the records read
-   * @throws IOException any problem
-   */
-  public LoadedRoleHistory read(String resource)
-      throws IOException, BadConfigException {
-
-    return read(this.getClass().getClassLoader().getResourceAsStream(resource));
-  }
-
-
-  /**
-   * Find all history entries in a dir. The dir is created if it does
-   * not already exist.
-   *
-   * The scan uses the glob pattern {@link SliderKeys#HISTORY_FILENAME_GLOB_PATTERN},
-   * dropping directories and, unless requested, empty files.
-   * The list is then sorted on filename, relying on the names of newer
-   * files sorting after those of older ones.
-   *
-   * @param fs filesystem
-   * @param dir dir to scan
-   * @param includeEmptyFiles should empty files be included in the result?
-   * @return a possibly empty list
-   * @throws IOException IO problems
-   * @throws FileNotFoundException if the target dir is actually a file
-   */
-  public List<Path> findAllHistoryEntries(FileSystem fs,
-                                          Path dir,
-                                          boolean includeEmptyFiles) throws IOException {
-    assert fs != null;
-    assert dir != null;
-    if (!fs.exists(dir)) {
-      fs.mkdirs(dir);
-    } else if (!fs.isDirectory(dir)) {
-      throw new FileNotFoundException("Not a directory " + dir.toString());
-    }
-    
-    PathFilter filter = new GlobFilter(SliderKeys.HISTORY_FILENAME_GLOB_PATTERN);
-    FileStatus[] stats = fs.listStatus(dir, filter);
-    List<Path> paths = new ArrayList<Path>(stats.length);
-    for (FileStatus stat : stats) {
-      log.debug("Possible entry: {}", stat.toString());
-      if (stat.isFile() && (includeEmptyFiles || stat.getLen() > 0)) {
-        paths.add(stat.getPath());
-      }
-    }
-    sortHistoryPaths(paths);
-    return paths;
-  }
-
-  @VisibleForTesting
-  public static void sortHistoryPaths(List<Path> paths) {
-    Collections.sort(paths, new NewerFilesFirst());
-  }
-  
-  /**
-   * Iterate through the paths until one can be loaded.
-   * @param fileSystem filesystem to read from
-   * @param paths paths to load
-   * @return the loaded history including the path -or null if all failed to load
-   */
-  public LoadedRoleHistory attemptToReadHistory(FileSystem fileSystem,
-      List<Path> paths)
-      throws BadConfigException {
-    ListIterator<Path> pathIterator = paths.listIterator();
-    boolean success = false;
-    LoadedRoleHistory history = null;
-    while (!success && pathIterator.hasNext()) {
-      Path path = pathIterator.next();
-      try {
-        history = read(fileSystem, path);
-        //success
-        success = true;
-        history.setPath(path);
-      } catch (IOException e) {
-        log.info("Failed to read {}", path, e);
-      } catch (AvroTypeException e) {
-        log.warn("Failed to parse {}", path, e);
-      } catch (Exception e) {
-        // low level event logged @ warn level
-        log.warn("Exception while reading {}", path, e);
-      }
-    }
-    return history;
-  }
-
-  /**
-   * Try to load the history from a directory -a failure to load a specific
-   * file is downgraded to a log and the next older path attempted instead
-   * @param fs filesystem
-   * @param dir dir to load from
-   * @return the history loaded, including the path
-   * @throws IOException if indexing the history directory fails. 
-   */
-  public LoadedRoleHistory loadFromHistoryDir(FileSystem fs, Path dir)
-      throws IOException, BadConfigException {
-    assert fs != null: "null filesystem";
-    List<Path> entries = findAllHistoryEntries(fs, dir, false);
-    return attemptToReadHistory(fs, entries);
-  }
-
-  /**
-   * Delete all history entries older than the one we want to keep. This
-   * uses the filename ordering to determine age, not timestamps.
-   * @param fileSystem filesystem
-   * @param keep path to keep -used in thresholding the files
-   * @return the number of files deleted
-   * @throws FileNotFoundException if the path to keep is not present (safety
-   * check to stop the entire dir being purged)
-   * @throws IOException IO problems
-   */
-  public int purgeOlderHistoryEntries(FileSystem fileSystem, Path keep)
-      throws IOException {
-    assert fileSystem != null : "null filesystem";
-    if (!fileSystem.exists(keep)) {
-      throw new FileNotFoundException(keep.toString());
-    }
-    Path dir = keep.getParent();
-    log.debug("Purging entries in {} up to {}", dir, keep);
-    List<Path> paths = findAllHistoryEntries(fileSystem, dir, true);
-    Collections.sort(paths, new OlderFilesFirst());
-    int deleteCount = 0;
-    for (Path path : paths) {
-      if (path.equals(keep)) {
-        break;
-      } else {
-        log.debug("Deleting {}", path);
-        deleteCount++;
-        fileSystem.delete(path, false);
-      }
-    }
-    return deleteCount;
-  }
-
-}

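
The writer above serialises the history as a JSON-encoded Avro stream: a header record, a role-map record, one NodeEntryRecord per (node, role) pair, then a footer carrying the record count. A sketch of the persistence cycle implied by those methods follows; it assumes an existing RoleHistory instance and history directory and is illustrative only:

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.slider.server.appmaster.state.RoleHistory;
import org.apache.slider.server.avro.LoadedRoleHistory;
import org.apache.slider.server.avro.RoleHistoryWriter;

public class RoleHistoryPersistenceSketch {
  /** Save a snapshot, reload the newest readable one, then purge older files. */
  public static LoadedRoleHistory snapshotAndReload(FileSystem fs, Path historyDir,
      RoleHistory history) throws Exception {
    RoleHistoryWriter writer = new RoleHistoryWriter();
    long now = System.currentTimeMillis();

    // The timestamp is baked into the file name, so newer files sort last.
    Path latest = writer.createHistoryFilename(historyDir, now);
    writer.write(fs, latest, true, history, now);

    // Newest file is tried first; an unreadable file is logged and skipped.
    LoadedRoleHistory loaded = writer.loadFromHistoryDir(fs, historyDir);

    // Keep only the snapshot just written (and anything newer).
    writer.purgeOlderHistoryEntries(fs, latest);
    return loaded;
  }
}
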
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/HttpProbe.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/HttpProbe.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/HttpProbe.java
deleted file mode 100644
index 5eba622..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/HttpProbe.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.servicemonitor;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
-import org.apache.slider.common.tools.SliderUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.Map;
-
-public class HttpProbe extends Probe {
-  protected static final Logger log = LoggerFactory.getLogger(HttpProbe.class);
-
-  private static final String HOST_TOKEN = "${THIS_HOST}";
-
-  private final String urlString;
-  private final int timeout;
-  private final int min, max;
-
-
-  public HttpProbe(String url, int timeout, int min, int max, Configuration
-      conf) {
-    super("Http probe of " + url + " [" + min + "-" + max + "]", conf);
-    this.urlString = url;
-    this.timeout = timeout;
-    this.min = min;
-    this.max = max;
-  }
-
-  public static HttpProbe create(Map<String, String> props)
-      throws IOException {
-    String urlString = getProperty(props, WEB_PROBE_URL, null);
-    new URL(urlString);
-    int timeout = getPropertyInt(props, WEB_PROBE_CONNECT_TIMEOUT,
-        WEB_PROBE_CONNECT_TIMEOUT_DEFAULT);
-    int minSuccess = getPropertyInt(props, WEB_PROBE_MIN_SUCCESS,
-        WEB_PROBE_MIN_SUCCESS_DEFAULT);
-    int maxSuccess = getPropertyInt(props, WEB_PROBE_MAX_SUCCESS,
-        WEB_PROBE_MAX_SUCCESS_DEFAULT);
-    return new HttpProbe(urlString, timeout, minSuccess, maxSuccess, null);
-  }
-
-
-  private static HttpURLConnection getConnection(URL url, int timeout) throws
-      IOException {
-    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-    connection.setInstanceFollowRedirects(true);
-    connection.setConnectTimeout(timeout);
-    return connection;
-  }
-
-  @Override
-  public ProbeStatus ping(ComponentInstance instance) {
-    ProbeStatus status = new ProbeStatus();
-    ContainerStatus containerStatus = instance.getContainerStatus();
-    if (containerStatus == null || SliderUtils.isEmpty(containerStatus.getIPs())
-        || StringUtils.isEmpty(containerStatus.getHost())) {
-      status.fail(this, new IOException("IP is not available yet"));
-      return status;
-    }
-
-    String ip = containerStatus.getIPs().get(0);
-    HttpURLConnection connection = null;
-    try {
-      URL url = new URL(urlString.replace(HOST_TOKEN, ip));
-      connection = getConnection(url, this.timeout);
-      int rc = connection.getResponseCode();
-      if (rc < min || rc > max) {
-        String error = "Probe " + url + " error code: " + rc;
-        log.info(error);
-        status.fail(this,
-            new IOException(error));
-      } else {
-        status.succeed(this);
-      }
-    } catch (Throwable e) {
-      String error = "Probe " + urlString + " failed for IP " + ip + ": " + e;
-      log.info(error, e);
-      status.fail(this,
-          new IOException(error, e));
-    } finally {
-      if (connection != null) {
-        connection.disconnect();
-      }
-    }
-    return status;
-  }
-}

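
A sketch of how an HTTP readiness probe is assembled from a property map. The key strings match the MonitorKeys constants further down in this patch; the URL, port, and path are purely illustrative:

import java.util.HashMap;
import java.util.Map;

import org.apache.slider.server.servicemonitor.HttpProbe;

public class HttpProbeSketch {
  public static HttpProbe buildProbe() throws Exception {
    Map<String, String> props = new HashMap<>();
    // ${THIS_HOST} is substituted with the container's IP at ping time.
    props.put("url", "http://${THIS_HOST}:8080/health");
    props.put("timeout", "1000");       // connect timeout in milliseconds
    props.put("min.success", "200");    // lowest response code counted as healthy
    props.put("max.success", "299");    // highest response code counted as healthy
    return HttpProbe.create(props);
  }
}

The resulting probe reports success when the response code of a plain GET falls inside [min.success, max.success], and failure otherwise.
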
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/LogEntryBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/LogEntryBuilder.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/LogEntryBuilder.java
deleted file mode 100644
index a1ad44f..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/LogEntryBuilder.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.servicemonitor;
-
-/**
- * Build up log entries for ease of consumption by Splunk
- */
-public class LogEntryBuilder {
-
-  private final StringBuilder builder = new StringBuilder();
-
-  public LogEntryBuilder() {
-  }
-
-  public LogEntryBuilder(String text) {
-    elt(text);
-  }
-
-
-  public LogEntryBuilder(String name, Object value) {
-    entry(name, value);
-  }
-
-  public LogEntryBuilder elt(String text) {
-    addComma();
-    builder.append(text);
-    return this;
-  }
-
-  public LogEntryBuilder elt(String name, Object value) {
-    addComma();
-    entry(name, value);
-    return this;
-  }
-
-  private void addComma() {
-    if (!isEmpty()) {
-      builder.append(", ");
-    }
-  }
-
-  private void entry(String name, Object value) {
-    builder.append(name).append('=');
-    if (value != null) {
-      builder.append('"').append(value.toString()).append('"');
-    } else {
-      builder.append("null");
-    }
-  }
-
-  @Override
-  public String toString() {
-    return builder.toString();
-  }
-
-  private boolean isEmpty() {
-    return builder.length() == 0;
-  }
-
-
-}

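
A small usage sketch of the builder above; the field names and values are invented:

import org.apache.slider.server.servicemonitor.LogEntryBuilder;

public class LogEntrySketch {
  public static void main(String[] args) {
    String entry = new LogEntryBuilder("probe")
        .elt("url", "http://example.org/health")
        .elt("outcome", "success")
        .elt("durationMillis", 42)
        .toString();
    // prints: probe, url="http://example.org/health", outcome="success", durationMillis="42"
    System.out.println(entry);
  }
}
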
http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/MonitorKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/MonitorKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/MonitorKeys.java
deleted file mode 100644
index e97ab43..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/MonitorKeys.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.servicemonitor;
-
-/**
- * Config keys for monitoring
- */
-public interface MonitorKeys {
-
-  /**
-   * Port probing key : port to attempt to create a TCP connection to {@value}.
-   */
-  String PORT_PROBE_PORT = "port";
-  /**
-   * Port probing key : timeout for the connection attempt {@value}.
-   */
-  String PORT_PROBE_CONNECT_TIMEOUT = "timeout";
-  /**
-   * Port probing default : timeout for the connection attempt {@value}.
-   */
-  int PORT_PROBE_CONNECT_TIMEOUT_DEFAULT = 1000;
-
-  /**
-   * Web probing key : URL {@value}.
-   */
-  String WEB_PROBE_URL = "url";
-  /**
-   * Web probing key : min success code {@value}.
-   */
-  String WEB_PROBE_MIN_SUCCESS = "min.success";
-  /**
-   * Web probing key : max success code {@value}.
-   */
-  String WEB_PROBE_MAX_SUCCESS = "max.success";
-  /**
-   * Web probing default : min successful response code {@value}.
-   */
-  int WEB_PROBE_MIN_SUCCESS_DEFAULT = 200;
-  /**
-   * Web probing default : max successful response code {@value}.
-   */
-  int WEB_PROBE_MAX_SUCCESS_DEFAULT = 299;
-  /**
-   * Web probing key : timeout for the connection attempt {@value}.
-   */
-  String WEB_PROBE_CONNECT_TIMEOUT = "timeout";
-  /**
-   * Web probing default : timeout for the connection attempt {@value}.
-   */
-  int WEB_PROBE_CONNECT_TIMEOUT_DEFAULT = 1000;
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bf581071/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/MonitorUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/MonitorUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/MonitorUtils.java
deleted file mode 100644
index 1e5c94c..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/servicemonitor/MonitorUtils.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.servicemonitor;
-
-import org.apache.slider.api.resource.ReadinessCheck;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Formatter;
-import java.util.Locale;
-
-/**
- * Various utils to work with the monitor
- */
-public final class MonitorUtils {
-  protected static final Logger LOG = LoggerFactory.getLogger(MonitorUtils
-      .class);
-
-  private MonitorUtils() {
-  }
-
-  public static String toPlural(int val) {
-    return val != 1 ? "s" : "";
-  }
-
-  /**
-   * Convert milliseconds to human time -the exact format is unspecified
-   * @param milliseconds a time in milliseconds
-   * @return a time that is converted to human intervals
-   */
-  public static String millisToHumanTime(long milliseconds) {
-    StringBuilder sb = new StringBuilder();
-    // Send all output to the Appendable object sb
-    Formatter formatter = new Formatter(sb, Locale.US);
-
-    long s = Math.abs(milliseconds / 1000);
-    long m = Math.abs(milliseconds % 1000);
-    if (milliseconds > 0) {
-      formatter.format("%d.%03ds", s, m);
-    } else if (milliseconds == 0) {
-      formatter.format("0");
-    } else {
-      formatter.format("-%d.%03ds", s, m);
-    }
-    return sb.toString();
-  }
-
-  public static Probe getProbe(ReadinessCheck readinessCheck) {
-    if (readinessCheck == null) {
-      return null;
-    }
-    if (readinessCheck.getType() == null) {
-      return null;
-    }
-    try {
-      switch (readinessCheck.getType()) {
-      case HTTP:
-        return HttpProbe.create(readinessCheck.getProps());
-      case PORT:
-        return PortProbe.create(readinessCheck.getProps());
-      default:
-        return null;
-      }
-    } catch (Throwable t) {
-      throw new IllegalArgumentException("Error creating readiness check " +
-          t);
-    }
-  }
-}

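
For reference, a couple of the helpers above in use; the values are arbitrary:

import org.apache.slider.server.servicemonitor.MonitorUtils;

public class MonitorUtilsSketch {
  public static void main(String[] args) {
    // 1500 ms renders as "1.500s"; 0 renders as "0".
    System.out.println(MonitorUtils.millisToHumanTime(1500));
    System.out.println(MonitorUtils.millisToHumanTime(0));

    // toPlural appends "s" for counts other than 1: "3 probes".
    System.out.println("3 probe" + MonitorUtils.toPlural(3));
  }
}
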

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org