Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2017/09/25 23:38:02 UTC

[03/86] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/mock/MockYarnEngine.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/mock/MockYarnEngine.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/mock/MockYarnEngine.java
deleted file mode 100644
index 9c5708f..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/mock/MockYarnEngine.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.model.mock;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.NodeReport;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.client.api.AMRMClient;
-import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
-import org.apache.slider.server.appmaster.operations.AbstractRMOperation;
-import org.apache.slider.server.appmaster.operations.CancelSingleRequest;
-import org.apache.slider.server.appmaster.operations.ContainerReleaseOperation;
-import org.apache.slider.server.appmaster.operations.ContainerRequestOperation;
-import org.junit.Assert;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.junit.Assert.assertNotNull;
-
-/**
- * This is an evolving engine to mock YARN operations.
- */
-public class MockYarnEngine {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(MockYarnEngine.class);
-
-  private MockYarnCluster cluster;
-  private Allocator allocator;
-  private List<ContainerRequestOperation> pending = new ArrayList<>();
-
-  private ApplicationId appId = new MockApplicationId(0, 0);
-
-  private ApplicationAttemptId attemptId = new MockApplicationAttemptId(appId,
-      1);
-
-  @Override
-  public String toString() {
-    return "MockYarnEngine " + cluster + " + pending=" + pending.size();
-  }
-
-  public int containerCount() {
-    return cluster.containersInUse();
-  }
-
-  public MockYarnEngine(int clusterSize, int containersPerNode) {
-    cluster = new MockYarnCluster(clusterSize, containersPerNode);
-    allocator = new Allocator(cluster);
-  }
-
-  public MockYarnCluster getCluster() {
-    return cluster;
-  }
-
-  public Allocator getAllocator() {
-    return allocator;
-  }
-
-  /**
-   * Allocate a container from a request. The container ID will be
-   * unique; the node ID and other fields are chosen internally with
-   * no such guarantees. The resource and priority are copied over.
-   * @param request the container request
-   * @return the allocated container, or null if none could be allocated
-   */
-  public Container allocateContainer(AMRMClient.ContainerRequest request) {
-    MockContainer allocated = allocator.allocate(request);
-    if (allocated != null) {
-      MockContainerId id = (MockContainerId)allocated.getId();
-      id.setApplicationAttemptId(attemptId);
-    }
-    return allocated;
-  }
-
-  MockYarnCluster.MockYarnClusterContainer releaseContainer(ContainerId
-      containerId) {
-    return cluster.release(containerId);
-  }
-
-  /**
-   * Process a list of operations: release the containers to be released and
-   * allocate containers where there is space (the list is not rescanned
-   * after the pass).
-   * @param ops operations to process
-   * @return the list of allocated containers
-   */
-  public List<Container> execute(List<AbstractRMOperation> ops) {
-    return execute(ops, new ArrayList<>());
-  }
-
-  /**
-   * Process a list of operations: release the containers to be released and
-   * allocate containers where there is space (the list is not rescanned
-   * after the pass). Unsatisfied requests are appended to the "pending" list.
-   * @param ops operations
-   * @return the list of all satisfied operations
-   */
-  public List<Container> execute(List<AbstractRMOperation> ops,
-                               List<ContainerId> released) {
-    validateRequests(ops);
-    List<Container> allocation = new ArrayList<>();
-    for (AbstractRMOperation op : ops) {
-      if (op instanceof ContainerReleaseOperation) {
-        ContainerReleaseOperation cro = (ContainerReleaseOperation) op;
-        ContainerId cid = cro.getContainerId();
-        assertNotNull(releaseContainer(cid));
-        released.add(cid);
-      } else if (op instanceof CancelSingleRequest) {
-        // no-op
-        LOG.debug("cancel request {}", op);
-      } else if (op instanceof ContainerRequestOperation) {
-        ContainerRequestOperation req = (ContainerRequestOperation) op;
-        Container container = allocateContainer(req.getRequest());
-        if (container != null) {
-          LOG.info("allocated container {} for {}", container, req);
-          allocation.add(container);
-        } else {
-          LOG.debug("Unsatisfied allocation {}", req);
-          pending.add(req);
-        }
-      } else {
-        LOG.warn("Unsupported operation {}", op);
-      }
-    }
-    return allocation;
-  }
-
-  /**
-   * Try to mimic some of the logic of <code>AMRMClientImpl
-   * .checkLocalityRelaxationConflict</code>.
-   * @param ops operations list
-   */
-  void validateRequests(List<AbstractRMOperation> ops) {
-    // run through the requests and verify that they are all consistent.
-    List<ContainerRequestOperation> outstandingRequests = new ArrayList<>();
-    for (AbstractRMOperation operation : ops) {
-      if (operation instanceof ContainerRequestOperation) {
-        ContainerRequestOperation containerRequest =
-            (ContainerRequestOperation) operation;
-        ContainerRequest amRequest = containerRequest.getRequest();
-        Priority priority = amRequest.getPriority();
-        boolean relax = amRequest.getRelaxLocality();
-
-        for (ContainerRequestOperation req : outstandingRequests) {
-          if (req.getPriority().equals(priority) && req.getRelaxLocality() !=
-              relax) {
-            // mismatch in values
-            Assert.fail("operation " + operation + " has a locality relaxation"
-                + " conflicting with an outstanding request at the same priority");
-          }
-        }
-        // record this request so later operations are validated against it
-        outstandingRequests.add(containerRequest);
-
-      }
-    }
-  }
-
-  /**
-   * Get the list of node reports. These are not cloned; updates will persist
-   * in the nodemap.
-   * @return current node report list
-   */
-  List<NodeReport> getNodeReports() {
-    return cluster.getNodeReports();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/monkey/TestMockMonkey.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/monkey/TestMockMonkey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/monkey/TestMockMonkey.java
deleted file mode 100644
index 16bd195..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/monkey/TestMockMonkey.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.model.monkey;
-
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.slider.api.InternalKeys;
-import org.apache.slider.server.appmaster.actions.ActionHalt;
-import org.apache.slider.server.appmaster.actions.ActionKillContainer;
-import org.apache.slider.server.appmaster.actions.AsyncAction;
-import org.apache.slider.server.appmaster.actions.QueueService;
-import org.apache.slider.server.appmaster.model.mock.BaseMockAppStateTest;
-import org.apache.slider.server.appmaster.model.mock.MockRMOperationHandler;
-import org.apache.slider.server.appmaster.monkey.ChaosKillAM;
-import org.apache.slider.server.appmaster.monkey.ChaosKillContainer;
-import org.apache.slider.server.appmaster.monkey.ChaosMonkeyService;
-import org.apache.slider.server.appmaster.monkey.ChaosTarget;
-import org.apache.slider.server.appmaster.operations.ContainerReleaseOperation;
-import org.apache.slider.server.appmaster.state.RoleInstance;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Test chaos monkey.
- */
-public class TestMockMonkey extends BaseMockAppStateTest {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(TestMockMonkey.class);
-
-  /**
-   * This queue service is NOT started; tests need to poll the queue
-   * rather than expect the scheduled actions to execute.
-   */
-  private QueueService queues;
-  private ChaosMonkeyService monkey;
-
-  @Before
-  public void init() {
-    YarnConfiguration configuration = new YarnConfiguration();
-    queues = new QueueService();
-    queues.init(configuration);
-    monkey = new ChaosMonkeyService(METRICS.getMetrics(), queues);
-    monkey.init(configuration);
-  }
-
-  //@Test
-  public void testMonkeyStart() throws Throwable {
-    monkey.start();
-    monkey.stop();
-  }
-
-  //@Test
-  public void testMonkeyPlay() throws Throwable {
-    ChaosCounter counter = new ChaosCounter();
-    monkey.addTarget("target", counter, InternalKeys.PROBABILITY_PERCENT_100);
-    assertEquals(1, monkey.getTargetCount());
-    monkey.play();
-    assertEquals(1, counter.count);
-  }
-
-  //@Test
-  public void testMonkeySchedule() throws Throwable {
-    ChaosCounter counter = new ChaosCounter();
-    assertEquals(0, monkey.getTargetCount());
-    monkey.addTarget("target", counter, InternalKeys.PROBABILITY_PERCENT_100);
-    assertEquals(1, monkey.getTargetCount());
-    assertTrue(monkey.schedule(0, 1, TimeUnit.SECONDS));
-    assertEquals(1, queues.scheduledActions.size());
-  }
-
-  //@Test
-  public void testMonkeyDoesntAddProb0Actions() throws Throwable {
-    ChaosCounter counter = new ChaosCounter();
-    monkey.addTarget("target", counter, 0);
-    assertEquals(0, monkey.getTargetCount());
-    monkey.play();
-    assertEquals(0, counter.count);
-  }
-
-  //@Test
-  public void testMonkeyScheduleProb0Actions() throws Throwable {
-    ChaosCounter counter = new ChaosCounter();
-    monkey.addTarget("target", counter, 0);
-    assertFalse(monkey.schedule(0, 1, TimeUnit.SECONDS));
-    assertEquals(0, queues.scheduledActions.size());
-  }
-
-  //@Test
-  public void testMonkeyPlaySometimes() throws Throwable {
-    ChaosCounter counter = new ChaosCounter();
-    ChaosCounter counter2 = new ChaosCounter();
-    monkey.addTarget("target1", counter, InternalKeys.PROBABILITY_PERCENT_1
-        * 50);
-    monkey.addTarget("target2", counter2, InternalKeys
-        .PROBABILITY_PERCENT_1 * 25);
-
-    for (int i = 0; i < 100; i++) {
-      monkey.play();
-    }
-    LOG.info("Counter1 = {} counter2 = {}", counter.count, counter2.count);
-    /*
-     * Relying on probability here to give approximate answers
-     */
-    assertTrue(counter.count > 25);
-    assertTrue(counter.count < 75);
-    assertTrue(counter2.count < counter.count);
-  }
-
-  //@Test
-  public void testAMKiller() throws Throwable {
-
-    ChaosKillAM chaos = new ChaosKillAM(queues, -1);
-    chaos.chaosAction();
-    assertEquals(1, queues.scheduledActions.size());
-    AsyncAction action = queues.scheduledActions.take();
-    assertTrue(action instanceof ActionHalt);
-  }
-
-  //@Test
-  public void testContainerKillerEmptyApp() throws Throwable {
-
-
-    ChaosKillContainer chaos = new ChaosKillContainer(appState,
-        queues,
-        new MockRMOperationHandler());
-    chaos.chaosAction();
-    assertEquals(0, queues.scheduledActions.size());
-  }
-
-  @Ignore
-  //@Test
-  public void testContainerKillerIgnoresAM() throws Throwable {
-    // TODO: AM needed in live container list?
-    addAppMastertoAppState();
-    assertEquals(1, appState.getLiveContainers().size());
-
-    ChaosKillContainer chaos = new ChaosKillContainer(appState,
-        queues,
-        new MockRMOperationHandler());
-    chaos.chaosAction();
-    assertEquals(0, queues.scheduledActions.size());
-  }
-
-  //@Test
-  public void testContainerKiller() throws Throwable {
-    MockRMOperationHandler ops = new MockRMOperationHandler();
-    getRole0Status().setDesired(1);
-    List<RoleInstance> instances = createAndStartNodes();
-    assertEquals(1, instances.size());
-    RoleInstance instance = instances.get(0);
-
-    ChaosKillContainer chaos = new ChaosKillContainer(appState, queues, ops);
-    chaos.chaosAction();
-    assertEquals(1, queues.scheduledActions.size());
-    AsyncAction action = queues.scheduledActions.take();
-    ActionKillContainer killer = (ActionKillContainer) action;
-    assertEquals(killer.getContainerId(), instance.getContainerId());
-    killer.execute(null, queues, appState);
-    assertEquals(1, ops.getNumReleases());
-
-    ContainerReleaseOperation operation = (ContainerReleaseOperation) ops
-        .getFirstOp();
-    assertEquals(operation.getContainerId(), instance.getContainerId());
-  }
-
-  /**
-   * Chaos target that just implements a counter.
-   */
-  private static class ChaosCounter implements ChaosTarget {
-    private int count;
-
-    @Override
-    public void chaosAction() {
-      count++;
-    }
-
-
-    @Override
-    public String toString() {
-      final StringBuilder sb = new StringBuilder(
-          "ChaosCounter{");
-      sb.append("count=").append(count);
-      sb.append('}');
-      return sb.toString();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/security/TestSecurityConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/security/TestSecurityConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/security/TestSecurityConfiguration.java
deleted file mode 100644
index 6e77806..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/security/TestSecurityConfiguration.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.server.appmaster.security;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.slider.api.resource.Application;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys;
-import org.apache.slider.core.exceptions.SliderException;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
-/**
- * Test security configuration.
- */
-public class TestSecurityConfiguration {
-
-  //@Test
-  public void testValidLocalConfiguration() throws Throwable {
-    Configuration config = new Configuration();
-    config.set(CommonConfigurationKeysPublic
-        .HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL, "test");
-    compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH,
-        "/some/local/path");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    SecurityConfiguration securityConfiguration =
-        new SecurityConfiguration(config, application, "testCluster");
-  }
-
-  //@Test
-  public void testValidDistributedConfiguration() throws Throwable {
-    Configuration config = new Configuration();
-    config.set(CommonConfigurationKeysPublic
-        .HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL, "test");
-    compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    SecurityConfiguration securityConfiguration =
-        new SecurityConfiguration(config, application, "testCluster");
-  }
-
-  //@Test
-  public void testMissingPrincipalNoLoginWithDistributedConfig() throws
-      Throwable {
-    Configuration config = new Configuration();
-    config.set(CommonConfigurationKeysPublic
-        .HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    try {
-      SecurityConfiguration securityConfiguration =
-          new SecurityConfiguration(config, application, "testCluster") {
-            @Override
-            protected UserGroupInformation getLoginUser() throws
-                IOException {
-              return null;
-            }
-          };
-      fail("expected SliderException");
-    } catch (SliderException e) {
-      // expected
-    }
-  }
-
-  //@Test
-  public void testMissingPrincipalNoLoginWithLocalConfig() throws Throwable {
-    Configuration config = new Configuration();
-    config.set(CommonConfigurationKeysPublic
-        .HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH,
-        "/some/local/path");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    try {
-      SecurityConfiguration securityConfiguration =
-          new SecurityConfiguration(config, application, "testCluster") {
-            @Override
-            protected UserGroupInformation getLoginUser() throws IOException {
-              return null;
-            }
-          };
-      fail("expected SliderException");
-    } catch (SliderException e) {
-      // expected
-    }
-  }
-
-  //@Test
-  public void testBothKeytabMechanismsConfigured() throws Throwable {
-    Configuration config = new Configuration();
-    config.set(CommonConfigurationKeysPublic
-        .HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_KEYTAB_PRINCIPAL, "test");
-    compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH,
-        "/some/local/path");
-    compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    try {
-      SecurityConfiguration securityConfiguration =
-          new SecurityConfiguration(config, application,
-              "testCluster");
-      fail("expected SliderException");
-    } catch (SliderException e) {
-      // expected
-    }
-  }
-
-  //@Test
-  public void testMissingPrincipalButLoginWithDistributedConfig() throws
-      Throwable {
-    Configuration config = new Configuration();
-    config.set(CommonConfigurationKeysPublic
-        .HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    SecurityConfiguration securityConfiguration =
-        new SecurityConfiguration(config, application, "testCluster");
-  }
-
-  //@Test
-  public void testMissingPrincipalButLoginWithLocalConfig() throws Throwable {
-    Configuration config = new Configuration();
-    config.set(CommonConfigurationKeysPublic
-        .HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH,
-        "/some/local/path");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    SecurityConfiguration securityConfiguration =
-        new SecurityConfiguration(config, application, "testCluster");
-  }
-
-  //@Test
-  public void testKeypathLocationOnceLocalized() throws Throwable {
-    Configuration config = new Configuration();
-    config.set(CommonConfigurationKeysPublic
-        .HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    SecurityConfiguration securityConfiguration =
-        new SecurityConfiguration(config, application, "testCluster");
-
-    assertEquals(new File(SliderKeys.KEYTAB_DIR, "some.keytab")
-            .getAbsolutePath(),
-        securityConfiguration.getKeytabFile().getAbsolutePath());
-  }
-
-  //@Test
-  public void testAMKeytabProvided() throws Throwable {
-    Configuration config = new Configuration();
-    Map<String, String> compOps = new HashMap<>();
-    compOps.put(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH, " ");
-    Application application = new Application().configuration(new org.apache
-        .slider.api.resource.Configuration().properties(compOps));
-
-    SecurityConfiguration securityConfiguration =
-        new SecurityConfiguration(config, application, "testCluster");
-    assertFalse(securityConfiguration.isKeytabProvided());
-
-    compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "");
-    assertFalse(securityConfiguration.isKeytabProvided());
-
-    compOps.put(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME, "some.keytab");
-    assertTrue(securityConfiguration.isKeytabProvided());
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/rest/registry/PathEntryMarshalling.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/rest/registry/PathEntryMarshalling.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/rest/registry/PathEntryMarshalling.java
deleted file mode 100644
index b887f28..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/rest/registry/PathEntryMarshalling.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.web.rest.registry;
-
-import org.apache.hadoop.registry.client.binding.JsonSerDeser;
-
-class PathEntryMarshalling
-    extends JsonSerDeser<PathEntryResource> {
-  public PathEntryMarshalling() {
-    super(PathEntryResource.class);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/rest/registry/TestRegistryRestMarshalling.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/rest/registry/TestRegistryRestMarshalling.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/rest/registry/TestRegistryRestMarshalling.java
deleted file mode 100644
index b0b0e31..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/rest/registry/TestRegistryRestMarshalling.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.web.rest.registry;
-
-import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes;
-import org.junit.Test;
-
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-
-/**
- * This test exists because Jersey appears to behave "oddly"
- * when it comes to marshalling JSON, and some of the REST resources
- * appear to have trouble.
- *
- * This test tries to isolate that behavior.
- */
-public class TestRegistryRestMarshalling {
-
-  //@Test
-  public void testDeser() throws Throwable {
-    PathEntryMarshalling pem = new PathEntryMarshalling();
-    PathEntryResource unmarshalled = pem.fromResource(
-        "/org/apache/slider/server/appmaster/web/rest/registry/sample.json");
-
-    ServiceRecord serviceRecord = unmarshalled.service;
-    assertNotNull(serviceRecord);
-    assertNotNull(serviceRecord.get(YarnRegistryAttributes.YARN_ID));
-    assertNotEquals("", serviceRecord.get(YarnRegistryAttributes
-        .YARN_PERSISTENCE));
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestClusterSpecificationBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestClusterSpecificationBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestClusterSpecificationBlock.java
deleted file mode 100644
index 43e4f39..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestClusterSpecificationBlock.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.server.appmaster.web.view;
-
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.slider.server.appmaster.model.mock.BaseMockAppStateTest;
-import org.apache.slider.server.appmaster.state.ProviderAppState;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-import org.apache.slider.server.appmaster.web.WebAppApiImpl;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-
-/**
- * Test cluster specification block.
- */
-public class TestClusterSpecificationBlock extends BaseMockAppStateTest {
-
-  private ClusterSpecificationBlock clusterSpecBlock;
-
-  @Before
-  public void setup() throws Exception {
-    super.setup();
-    ProviderAppState providerAppState = new ProviderAppState(
-        "undefined",
-        appState);
-
-    WebAppApiImpl inst = new WebAppApiImpl(
-        providerAppState,
-        null,
-        null, null);
-
-    Injector injector = Guice.createInjector(new AbstractModule() {
-          @Override
-          protected void configure() {
-            bind(WebAppApi.class).toInstance(inst);
-          }
-        });
-
-    clusterSpecBlock = injector.getInstance(ClusterSpecificationBlock.class);
-  }
-
-  //@Test
-  public void testJsonGeneration() {
-    StringWriter sw = new StringWriter(64);
-    PrintWriter pw = new PrintWriter(sw);
-
-    Hamlet hamlet = new Hamlet(pw, 0, false);
-
-    int level = hamlet.nestLevel();
-    clusterSpecBlock.doRender(hamlet);
-
-    assertEquals(level, hamlet.nestLevel());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestContainerStatsBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestContainerStatsBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestContainerStatsBlock.java
deleted file mode 100644
index 56f209c..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestContainerStatsBlock.java
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.server.appmaster.web.view;
-
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR;
-import org.apache.hadoop.yarn.webapp.hamlet.HamletImpl.EImp;
-import org.apache.slider.api.ClusterNode;
-import org.apache.slider.server.appmaster.model.mock.BaseMockAppStateTest;
-import org.apache.slider.server.appmaster.model.mock.MockContainer;
-import org.apache.slider.server.appmaster.model.mock.MockContainerId;
-import org.apache.slider.server.appmaster.model.mock.MockNodeId;
-import org.apache.slider.server.appmaster.model.mock.MockResource;
-import org.apache.slider.server.appmaster.state.ProviderAppState;
-import org.apache.slider.server.appmaster.state.RoleInstance;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-import org.apache.slider.server.appmaster.web.WebAppApiImpl;
-import org.apache.slider.server.appmaster.web.view.ContainerStatsBlock.ClusterNodeNameComparator;
-import org.apache.slider.server.appmaster.web.view.ContainerStatsBlock.TableAnchorContent;
-import org.apache.slider.server.appmaster.web.view.ContainerStatsBlock.TableContent;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Test container stats block.
- */
-public class TestContainerStatsBlock extends BaseMockAppStateTest {
-
-  private ContainerStatsBlock statsBlock;
-
-  private Container cont1, cont2;
-
-  @Before
-  public void setup() throws Exception {
-    super.setup();
-    ProviderAppState providerAppState = new ProviderAppState(
-        "undefined",
-        appState);
-
-    WebAppApiImpl inst = new WebAppApiImpl(
-        providerAppState,
-        null,
-        METRICS, null);
-
-    Injector injector = Guice.createInjector(new WebappModule(inst));
-    statsBlock = injector.getInstance(ContainerStatsBlock.class);
-
-    cont1 = new MockContainer();
-
-    cont1.setId(mockContainerId(0));
-    cont1.setNodeId(new MockNodeId());
-    cont1.setPriority(Priority.newInstance(1));
-    cont1.setResource(new MockResource(0, 0));
-
-    cont2 = new MockContainer();
-    cont2.setId(mockContainerId(1));
-    cont2.setNodeId(new MockNodeId());
-    cont2.setPriority(Priority.newInstance(1));
-    cont2.setResource(new MockResource(0, 0));
-  }
-
-  private static class WebappModule extends AbstractModule {
-    private final WebAppApiImpl instance;
-
-    WebappModule(WebAppApiImpl instance) {
-      this.instance = instance;
-    }
-
-    @Override
-    protected void configure() {
-      bind(WebAppApi.class).toInstance(instance);
-    }
-  }
-
-
-  public MockContainerId mockContainerId(int count) {
-    return new MockContainerId(applicationAttemptId, count);
-  }
-
-  //@Test
-  public void testGetContainerInstances() {
-    List<RoleInstance> roles = Arrays.asList(
-        new RoleInstance(cont1),
-        new RoleInstance(cont2)
-    );
-    Map<String, RoleInstance> map = statsBlock.getContainerInstances(roles);
-
-    assertEquals(2, map.size());
-
-    assertTrue(map.containsKey("mockcontainer_0"));
-    assertEquals(map.get("mockcontainer_0"), roles.get(0));
-
-    assertTrue(map.containsKey("mockcontainer_1"));
-    assertEquals(map.get("mockcontainer_1"), roles.get(1));
-  }
-
-  //@Test
-  public void testGenerateRoleDetailsWithTwoColumns() {
-    StringWriter sw = new StringWriter(64);
-    PrintWriter pw = new PrintWriter(sw);
-
-    Hamlet hamlet = new Hamlet(pw, 0, false);
-
-    // Make a div to put the content into
-    DIV<Hamlet> div = hamlet.div();
-
-    String detailsName = "testing";
-    String selector = "selector";
-    Map<TableContent, String> data = new HashMap<>();
-    data.put(new ContainerStatsBlock.TableContent("Foo"), "bar");
-
-    int levelPrior = hamlet.nestLevel();
-    statsBlock.generateRoleDetails(div, selector, detailsName, data.entrySet());
-
-    // Close out the div we made
-    // DIV<Hamlet>._() will actually invoke the wrong method (creating <p>),
-    // explicit cast to make sure we're closing out the <div>
-    ((EImp) div)._();
-
-    assertEquals(levelPrior, hamlet.nestLevel());
-  }
-
-  //@Test
-  public void testGenerateRoleDetailsWithOneColumn() {
-    StringWriter sw = new StringWriter(64);
-    PrintWriter pw = new PrintWriter(sw);
-
-    Hamlet hamlet = new Hamlet(pw, 0, false);
-    DIV<Hamlet> div = hamlet.div();
-
-    String detailsName = "testing";
-    String selector = "selector";
-    Map<TableContent, String> data = new HashMap<>();
-    data.put(new ContainerStatsBlock.TableContent("Bar"), null);
-
-    int levelPrior = hamlet.nestLevel();
-    statsBlock.generateRoleDetails(div, selector, detailsName, data.entrySet());
-
-    // Close out the div we made
-    // DIV<Hamlet>._() will actually invoke the wrong method (creating <p>),
-    // explicit cast to make sure we're closing out the <div>
-    ((EImp) div)._();
-
-    assertEquals(levelPrior, hamlet.nestLevel());
-  }
-
-  //@Test
-  public void testGenerateRoleDetailsWithNoData() {
-    StringWriter sw = new StringWriter(64);
-    PrintWriter pw = new PrintWriter(sw);
-
-    Hamlet hamlet = new Hamlet(pw, 0, false);
-    DIV<Hamlet> div = hamlet.div();
-
-    String detailsName = "testing";
-    String selector = "selector";
-    Map<TableContent, String> data = new HashMap<>();
-
-    int levelPrior = hamlet.nestLevel();
-    statsBlock.generateRoleDetails(div, selector, detailsName, data.entrySet());
-
-    // Close out the div we made
-    // DIV<Hamlet>._() will actually invoke the wrong method (creating <p>),
-    // explicit cast to make sure we're closing out the <div>
-    ((EImp) div)._();
-
-    assertEquals(levelPrior, hamlet.nestLevel());
-  }
-
-  //@Test
-  public void testClusterNodeNameComparator() {
-    ClusterNode n1 = new ClusterNode(mockContainerId(1)),
-        n2 = new ClusterNode(mockContainerId(2)),
-        n3 = new ClusterNode(mockContainerId(3));
-
-    List<ClusterNode> nodes = new ArrayList<ClusterNode>();
-    nodes.add(n2);
-    nodes.add(n3);
-    nodes.add(n1);
-
-    Collections.sort(nodes, new ClusterNodeNameComparator());
-
-    String prevName = "";
-    for (ClusterNode node : nodes) {
-      assertTrue(prevName.compareTo(node.name) <= 0);
-      prevName = node.name;
-    }
-  }
-
-  //@Test
-  public void testTableContent() {
-    StringWriter sw = new StringWriter(64);
-    PrintWriter pw = new PrintWriter(sw);
-    TableContent tc = new TableContent("foo");
-
-    Hamlet hamlet = new Hamlet(pw, 0, false);
-    TR<TABLE<Hamlet>> tr = hamlet.table().tr();
-
-    int prevLevel = hamlet.nestLevel();
-    // printCell should not end the tr
-    tc.printCell(tr);
-    tr._();
-    assertEquals(prevLevel, hamlet.nestLevel());
-  }
-
-  //@Test
-  public void testTableAnchorContent() {
-    StringWriter sw = new StringWriter(64);
-    PrintWriter pw = new PrintWriter(sw);
-    TableContent tc = new TableAnchorContent("foo", "http://bar.com");
-
-    Hamlet hamlet = new Hamlet(pw, 0, false);
-    TR<TABLE<Hamlet>> tr = hamlet.table().tr();
-
-    int prevLevel = hamlet.nestLevel();
-    // printCell should not end the tr
-    tc.printCell(tr);
-    tr._();
-    assertEquals(prevLevel, hamlet.nestLevel());
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestIndexBlock.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestIndexBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestIndexBlock.java
deleted file mode 100644
index eecf213..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/web/view/TestIndexBlock.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.server.appmaster.web.view;
-
-import com.google.inject.AbstractModule;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.slider.server.appmaster.model.appstate.BaseMockAppStateAATest;
-import org.apache.slider.server.appmaster.model.mock.MockContainer;
-import org.apache.slider.server.appmaster.model.mock.MockContainerId;
-import org.apache.slider.server.appmaster.model.mock.MockNodeId;
-import org.apache.slider.server.appmaster.model.mock.MockResource;
-import org.apache.slider.server.appmaster.state.ContainerOutcome;
-import org.apache.slider.server.appmaster.state.OutstandingRequest;
-import org.apache.slider.server.appmaster.state.ProviderAppState;
-import org.apache.slider.server.appmaster.state.RoleStatus;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-import org.apache.slider.server.appmaster.web.WebAppApiImpl;
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.PrintWriter;
-import java.io.StringWriter;
-
-/**
- * Test index block.
- */
-public class TestIndexBlock extends BaseMockAppStateAATest {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(TestIndexBlock.class);
-
-  private IndexBlock indexBlock;
-
-  private Container cont1, cont2;
-
-  @Before
-  public void setup() throws Exception {
-    super.setup();
-    assertNotNull(appState);
-    ProviderAppState providerAppState = new ProviderAppState(
-        "undefined",
-        appState);
-
-    WebAppApiImpl inst = new WebAppApiImpl(
-        providerAppState,
-        null,
-        METRICS, null);
-
-    Injector injector = Guice.createInjector(new AbstractModule() {
-          @Override
-          protected void configure() {
-            bind(WebAppApi.class).toInstance(inst);
-          }
-        });
-
-    indexBlock = injector.getInstance(IndexBlock.class);
-
-    cont1 = new MockContainer();
-    cont1.setId(new MockContainerId(applicationAttemptId, 0));
-    cont1.setNodeId(new MockNodeId());
-    cont1.setPriority(Priority.newInstance(1));
-    cont1.setResource(new MockResource(0, 0));
-
-    cont2 = new MockContainer();
-    cont2.setId(new MockContainerId(applicationAttemptId, 1));
-    cont2.setNodeId(new MockNodeId());
-    cont2.setPriority(Priority.newInstance(1));
-    cont2.setResource(new MockResource(0, 0));
-  }
-
-  //@Test
-  public void testIndex() {
-    RoleStatus role0 = getRole0Status();
-    RoleStatus role1 = getRole1Status();
-    RoleStatus role2 = getRole2Status();
-
-    int role0Desired = 8;
-
-    role0.setDesired(role0Desired);
-    int role0Actual = 5;
-    int role0Requested = role0Desired - role0Actual;
-    for (int i = 0; i < role0Actual; i++) {
-      appState.incRunningContainers(role0);
-    }
-    assertEquals(role0.getRunning(), role0Actual);
-    for (int i = 0; i < role0Requested; i++) {
-      appState.incRequestedContainers(role0);
-    }
-    assertEquals(role0.getRequested(), role0Requested);
-
-    int role0Failures = 2;
-
-    appState.incFailedContainers(role0, ContainerOutcome.Failed);
-    appState.incFailedContainers(role0, ContainerOutcome.Failed);
-
-    RoleStatus aaRole = getAaRole();
-    // all aa roles fields are in the
-    int aaroleDesired = 200;
-    aaRole.setDesired(aaroleDesired);
-    int aaroleActual = 90;
-    int aaroleActive = 1;
-    int aaroleRequested = aaroleDesired - aaroleActual;
-    int aarolePending = aaroleRequested - 1;
-    int aaroleFailures = 0;
-    for (int i = 0; i < aaroleActual; i++) {
-      appState.incRunningContainers(aaRole);
-    }
-    assertEquals(aaRole.getRunning(), aaroleActual);
-    aaRole.setOutstandingAArequest(new OutstandingRequest(2, ""));
-    // add a requested
-    appState.incRequestedContainers(aaRole);
-    aaRole.getComponentMetrics().pendingAAContainers.set(aarolePending);
-    assertEquals(aaRole.getAAPending(), aarolePending);
-
-    assertEquals(aaRole.getActualAndRequested(), aaroleActual + 1);
-    StringWriter sw = new StringWriter(64);
-    PrintWriter pw = new PrintWriter(sw);
-
-    Hamlet hamlet = new Hamlet(pw, 0, false);
-
-    indexBlock.doIndex(hamlet, "accumulo");
-
-    String body = sw.toString();
-    LOG.info(body);
-    // verify role data came out
-    assertTrue(body.contains("role0"));
-    assertContains(role0Desired, body);
-    assertContains(role0Actual, body);
-    assertContains(role0Requested, body);
-    assertContains(role0Failures, body);
-
-    assertTrue(body.contains("role1"));
-    assertTrue(body.contains("role2"));
-
-    assertContains(aaroleDesired, body);
-    assertContains(aaroleActual, body);
-//    assertContains(aaroleRequested, body)
-    assertContains(aaroleFailures, body);
-    assertTrue(body.contains(indexBlock.buildAADetails(true, aarolePending)));
-
-    // verify that the sorting took place
-    assertTrue(body.indexOf("role0") < body.indexOf("role1"));
-    assertTrue(body.indexOf("role1") < body.indexOf("role2"));
-
-    assertFalse(body.contains(IndexBlock.ALL_CONTAINERS_ALLOCATED));
-    // role
-  }
-
-  void assertContains(int ex, String html) {
-    assertStringContains(Integer.toString(ex), html);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/management/TestGauges.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/management/TestGauges.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/management/TestGauges.java
deleted file mode 100644
index 11ebabe..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/management/TestGauges.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.management;
-
-import org.apache.slider.server.appmaster.management.LongGauge;
-import org.apache.slider.utils.SliderTestBase;
-import org.junit.Test;
-
-/**
- * Test gauges.
- */
-public class TestGauges extends SliderTestBase {
-
-  //@Test
-  public void testLongGaugeOperations() throws Throwable {
-    LongGauge gauge = new LongGauge();
-    assertEquals(0, gauge.get());
-    gauge.inc();
-    assertEquals(1, gauge.get());
-    gauge.inc();
-    assertEquals(2, gauge.get());
-    gauge.inc();
-    assertEquals(3, gauge.get());
-    assertEquals(gauge.getValue().longValue(), gauge.get());
-    assertEquals(gauge.getCount().longValue(), gauge.get());
-
-    gauge.dec();
-    assertEquals(2, gauge.get());
-    assertEquals(1, gauge.decToFloor(1));
-    assertEquals(1, gauge.get());
-    assertEquals(0, gauge.decToFloor(1));
-    assertEquals(0, gauge.decToFloor(1));
-    assertEquals(0, gauge.decToFloor(0));
-
-    gauge.set(4);
-    assertEquals(0, gauge.decToFloor(8));
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/MockService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/MockService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/MockService.java
deleted file mode 100644
index 588f621..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/MockService.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.service.ServiceStateException;
-
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-public class MockService extends AbstractService {
-  private final boolean fail;
-  private final int lifespan;
-  private final ExecutorService executorService =
-      Executors.newSingleThreadExecutor();
-
-  MockService() {
-    this("mock", false, -1);
-  }
-
-  MockService(String name, boolean fail, int lifespan) {
-    super(name);
-    this.fail = fail;
-    this.lifespan = lifespan;
-  }
-
-  @Override
-  protected void serviceStart() throws Exception {
-    //act on the lifespan here
-    if (lifespan > 0) {
-      executorService.submit(new Runnable() {
-        @Override
-        public void run() {
-          try {
-            Thread.sleep(lifespan);
-          } catch (InterruptedException ignored) {
-
-          }
-          finish();
-        }
-      });
-    } else {
-      if (lifespan == 0) {
-        finish();
-      } else {
-        //continue until told to stop
-      }
-    }
-  }
-
-  void finish() {
-    if (fail) {
-      ServiceStateException e =
-          new ServiceStateException(getName() + " failed");
-
-      noteFailure(e);
-      stop();
-      throw e;
-    } else {
-      stop();
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/ParentWorkflowTestBase.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/ParentWorkflowTestBase.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/ParentWorkflowTestBase.java
deleted file mode 100644
index a11a1cf..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/ParentWorkflowTestBase.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-import org.apache.hadoop.service.Service;
-
-/**
- * Extends {@link WorkflowServiceTestBase} with parent-specific operations
- * and logic to build up and run the parent service
- */
-public abstract class ParentWorkflowTestBase extends WorkflowServiceTestBase {
-
-  /**
-   * Wait a second for the service parent to stop
-   * @param parent the service to wait for
-   */
-  protected void waitForParentToStop(ServiceParent parent) {
-    waitForParentToStop(parent, 1000);
-  }
-
-  /**
-   * Wait for the service parent to stop
-   * @param parent the service to wait for
-   * @param timeout time in milliseconds
-   */
-  protected void waitForParentToStop(ServiceParent parent, int timeout) {
-    boolean stop = parent.waitForServiceToStop(timeout);
-    if (!stop) {
-      logState(parent);
-      fail("Service failed to stop : after " + timeout + " millis " + parent);
-    }
-  }
-
-  /**
-   * Subclasses are required to implement this and return an instance of a
-   * ServiceParent
-   * @param services a possibly empty list of services
-   * @return an inited, but not started, service parent instance
-   */
-  protected abstract ServiceParent buildService(Service... services);
-
-  /**
-   * Use {@link #buildService(Service...)} to create a service and then start it
-   * @param services a possibly empty list of services
-   * @return the started service parent
-   */
-  protected ServiceParent startService(Service... services) {
-    ServiceParent parent = buildService(services);
-    //expect service to start and stay started
-    parent.start();
-    return parent;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/ProcessCommandFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/ProcessCommandFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/ProcessCommandFactory.java
deleted file mode 100644
index 4a19417..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/ProcessCommandFactory.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-import org.apache.hadoop.util.Shell;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * A source of process commands, designed so that different implementations
- * can be added for different platforms.
- */
-public class ProcessCommandFactory {
-
-  protected ProcessCommandFactory() {
-  }
-
-  /**
-   * The command to list a directory
-   * @param dir directory
-   * @return commands
-   */
-  public List<String> ls(File dir) {
-    List<String> commands;
-    if (!Shell.WINDOWS) {
-      commands = Arrays.asList("ls","-1", dir.getAbsolutePath());
-    } else {
-      commands = Arrays.asList("cmd", "/c", "dir", dir.getAbsolutePath());
-    }
-    return commands;
-  }
-
-  /**
-   * Echo some text to stdout
-   * @param text text
-   * @return commands
-   */
-  public List<String> echo(String text) {
-    List<String> commands = new ArrayList<String>(5);
-    commands.add("echo");
-    commands.add(text);
-    return commands;
-  }
-
-  /**
-   * Print environment variables
-   * @return commands
-   */
-  public List<String> env() {
-    List<String> commands;
-    if (!Shell.WINDOWS) {
-      commands = Arrays.asList("env");
-    } else {
-      commands = Arrays.asList("cmd", "/c", "set");
-    }
-    return commands;
-  }
-
-  /**
-   * Execute a command that exits with an error code, which can
-   * be converted into a number
-   * @return commands
-   */
-  public List<String> exitFalse() {
-    List<String> commands = new ArrayList<String>(2);
-    commands.add("false");
-    return commands;
-  }
-
-  /**
-   * Create a process command factory for this OS
-   * @return a new factory instance
-   */
-  public static ProcessCommandFactory createProcessCommandFactory() {
-    return new ProcessCommandFactory();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/SimpleRunnable.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/SimpleRunnable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/SimpleRunnable.java
deleted file mode 100644
index 1f330f4..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/SimpleRunnable.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-/**
- * Test runnable that exits immediately, and can be made to throw an
- * exception during its run
- */
-class SimpleRunnable implements Runnable {
-  boolean throwException = false;
-
-
-  SimpleRunnable() {
-  }
-
-  SimpleRunnable(boolean throwException) {
-    this.throwException = throwException;
-  }
-
-  @Override
-  public synchronized void run() {
-    try {
-      if (throwException) {
-        throw new RuntimeException("SimpleRunnable");
-      }
-    } finally {
-      this.notify();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowClosingService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowClosingService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowClosingService.java
deleted file mode 100644
index 19f40e9..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowClosingService.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Test;
-
-import java.io.Closeable;
-import java.io.IOException;
-
-public class TestWorkflowClosingService extends WorkflowServiceTestBase {
-
-  //@Test
-  public void testSimpleClose() throws Throwable {
-    ClosingService<OpenClose> svc = instance(false);
-    OpenClose openClose = svc.getCloseable();
-    assertFalse(openClose.closed);
-    svc.stop();
-    assertTrue(openClose.closed);
-  }
-
-  //@Test
-  public void testNullClose() throws Throwable {
-    ClosingService<OpenClose> svc = new ClosingService<OpenClose>("", null);
-    svc.init(new Configuration());
-    svc.start();
-    assertNull(svc.getCloseable());
-    svc.stop();
-  }
-
-  //@Test
-  public void testFailingClose() throws Throwable {
-    ClosingService<OpenClose> svc = instance(false);
-    OpenClose openClose = svc.getCloseable();
-    openClose.raiseExceptionOnClose = true;
-    svc.stop();
-    assertTrue(openClose.closed);
-    Throwable cause = svc.getFailureCause();
-    assertNotNull(cause);
-
-    //retry should be a no-op
-    svc.close();
-  }
-
-  //@Test
-  public void testDoubleClose() throws Throwable {
-    ClosingService<OpenClose> svc = instance(false);
-    OpenClose openClose = svc.getCloseable();
-    openClose.raiseExceptionOnClose = true;
-    svc.stop();
-    assertTrue(openClose.closed);
-    Throwable cause = svc.getFailureCause();
-    assertNotNull(cause);
-    openClose.closed = false;
-    svc.stop();
-    assertEquals(cause, svc.getFailureCause());
-  }
-
-  /**
-   * This does not recurse forever, as the service has already entered the
-   * STOPPED state before the inner close tries to stop it; that operation
-   * is a no-op
-   * @throws Throwable
-   */
-  //@Test
-  public void testCloseSelf() throws Throwable {
-    ClosingService<ClosingService> svc =
-        new ClosingService<ClosingService>("");
-    svc.setCloseable(svc);
-    svc.stop();
-  }
-
-
-  private ClosingService<OpenClose> instance(boolean raiseExceptionOnClose) {
-    ClosingService<OpenClose> svc = new ClosingService<OpenClose>(new OpenClose(
-        raiseExceptionOnClose));
-    svc.init(new Configuration());
-    svc.start();
-    return svc;
-  }
-
-  private static class OpenClose implements Closeable {
-    public boolean closed = false;
-    public boolean raiseExceptionOnClose;
-
-    private OpenClose(boolean raiseExceptionOnClose) {
-      this.raiseExceptionOnClose = raiseExceptionOnClose;
-    }
-
-    @Override
-    public void close() throws IOException {
-      if (!closed) {
-        closed = true;
-        if (raiseExceptionOnClose) {
-          throw new IOException("OpenClose");
-        }
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowCompositeService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowCompositeService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowCompositeService.java
deleted file mode 100644
index 0cd1ac9..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowCompositeService.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.service.Service;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestWorkflowCompositeService extends ParentWorkflowTestBase {
-  private static final Logger
-      log = LoggerFactory.getLogger(TestWorkflowCompositeService.class);
-
-  //@Test
-  public void testSingleChild() throws Throwable {
-    Service parent = startService(new MockService());
-    parent.stop();
-  }
-
-  //@Test
-  public void testSingleChildTerminating() throws Throwable {
-    ServiceParent parent =
-        startService(new MockService("1", false, 100));
-    waitForParentToStop(parent);
-  }
-
-  //@Test
-  public void testSingleChildFailing() throws Throwable {
-    ServiceParent parent =
-        startService(new MockService("1", true, 100));
-    waitForParentToStop(parent);
-    assert parent.getFailureCause() != null;
-  }
-
-  //@Test
-  public void testTwoChildren() throws Throwable {
-    MockService one = new MockService("one", false, 100);
-    MockService two = new MockService("two", false, 100);
-    ServiceParent parent = startService(one, two);
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertStopped(two);
-  }
-
-  //@Test
-  public void testCallableChild() throws Throwable {
-
-    MockService one = new MockService("one", false, 100);
-    CallableHandler handler = new CallableHandler("hello");
-    WorkflowCallbackService<String> ens =
-        new WorkflowCallbackService<String>("handler", handler, 100, true);
-    MockService two = new MockService("two", false, 100);
-    ServiceParent parent = startService(one, ens, two);
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertStopped(ens);
-    assertStopped(two);
-    assertTrue(handler.notified);
-    String s = ens.getScheduledFuture().get();
-    assertEquals("hello", s);
-  }
-
-  //@Test
-  public void testNestedComposite() throws Throwable {
-    MockService one = new MockService("one", false, 100);
-    MockService two = new MockService("two", false, 100);
-    ServiceParent parent = buildService(one, two);
-    ServiceParent outer = startService(parent);
-    assertTrue(outer.waitForServiceToStop(1000));
-    assertStopped(one);
-    assertStopped(two);
-  }
-
-  //@Test
-  public void testFailingComposite() throws Throwable {
-    MockService one = new MockService("one", true, 10);
-    MockService two = new MockService("two", false, 1000);
-    ServiceParent parent = startService(one, two);
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertStopped(two);
-    assertNotNull(one.getFailureCause());
-    assertNotNull(parent.getFailureCause());
-    assertEquals(one.getFailureCause(), parent.getFailureCause());
-  }
-
-  @Override
-  public ServiceParent buildService(Service... services) {
-    ServiceParent parent =
-        new WorkflowCompositeService("test", services);
-    parent.init(new Configuration());
-    return parent;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowExecutorService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowExecutorService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowExecutorService.java
deleted file mode 100644
index 38cc886..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowExecutorService.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-import org.junit.Test;
-
-import java.util.concurrent.ExecutorService;
-
-
-/**
- * Basic tests for executor service
- */
-public class TestWorkflowExecutorService extends WorkflowServiceTestBase {
-
-  //@Test
-  public void testAsyncRun() throws Throwable {
-
-    ExecutorSvc svc = run(new ExecutorSvc());
-    ServiceTerminatingRunnable runnable = new ServiceTerminatingRunnable(svc,
-        new SimpleRunnable());
-
-    // synchronous in-thread execution
-    svc.execute(runnable);
-    Thread.sleep(1000);
-    assertStopped(svc);
-  }
-
-  //@Test
-  public void testFailureRun() throws Throwable {
-
-    ExecutorSvc svc = run(new ExecutorSvc());
-    ServiceTerminatingRunnable runnable = new ServiceTerminatingRunnable(svc,
-        new SimpleRunnable(true));
-
-    // synchronous in-thread execution
-    svc.execute(runnable);
-    Thread.sleep(1000);
-    assertStopped(svc);
-    assertNotNull(runnable.getException());
-  }
-
-  private static class ExecutorSvc
-      extends WorkflowExecutorService<ExecutorService> {
-    private ExecutorSvc() {
-      super("ExecutorService",
-          ServiceThreadFactory.singleThreadExecutor("test", true));
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowRpcService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowRpcService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowRpcService.java
deleted file mode 100644
index 758c64f..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowRpcService.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ipc.Server;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-
-public class TestWorkflowRpcService extends WorkflowServiceTestBase {
-
-  //@Test
-  public void testCreateMockRPCService() throws Throwable {
-    MockRPC rpc = new MockRPC();
-    rpc.start();
-    assertTrue(rpc.started);
-    rpc.getListenerAddress();
-    rpc.stop();
-    assertTrue(rpc.stopped);
-  }
-
-  //@Test
-  public void testLifecycle() throws Throwable {
-    MockRPC rpc = new MockRPC();
-    WorkflowRpcService svc = new WorkflowRpcService("test", rpc);
-    run(svc);
-    assertTrue(rpc.started);
-    svc.getConnectAddress();
-    svc.stop();
-    assertTrue(rpc.stopped);
-  }
-
-  //@Test
-  public void testStartFailure() throws Throwable {
-    MockRPC rpc = new MockRPC();
-    rpc.failOnStart = true;
-    WorkflowRpcService svc = new WorkflowRpcService("test", rpc);
-    svc.init(new Configuration());
-    try {
-      svc.start();
-      fail("expected an exception");
-    } catch (RuntimeException e) {
-      assertEquals("failOnStart", e.getMessage());
-    }
-    svc.stop();
-    assertTrue(rpc.stopped);
-  }
-
-  private static class MockRPC extends Server {
-
-    public boolean stopped;
-    public boolean started;
-    public boolean failOnStart;
-
-    private MockRPC() throws IOException {
-      super("localhost", 0, null, 1, new Configuration());
-    }
-
-    @Override
-    public synchronized void start() {
-      if (failOnStart) {
-        throw new RuntimeException("failOnStart");
-      }
-      started = true;
-      super.start();
-    }
-
-    @Override
-    public synchronized void stop() {
-      stopped = true;
-      super.stop();
-    }
-
-    @Override
-    public synchronized InetSocketAddress getListenerAddress() {
-      return super.getListenerAddress();
-    }
-
-    @Override
-    public Writable call(RPC.RpcKind rpcKind,
-        String protocol,
-        Writable param,
-        long receiveTime) throws Exception {
-      return null;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowSequenceService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowSequenceService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowSequenceService.java
deleted file mode 100644
index b683641..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/services/workflow/TestWorkflowSequenceService.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.services.workflow;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.service.Service;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestWorkflowSequenceService extends ParentWorkflowTestBase {
-  private static final Logger
-      log = LoggerFactory.getLogger(TestWorkflowSequenceService.class);
-
-  //@Test
-  public void testSingleSequence() throws Throwable {
-    ServiceParent parent = startService(new MockService());
-    parent.stop();
-  }
-
-  //@Test
-  public void testEmptySequence() throws Throwable {
-    ServiceParent parent = startService();
-    waitForParentToStop(parent);
-  }
-
-  //@Test
-  public void testSequence() throws Throwable {
-    MockService one = new MockService("one", false, 100);
-    MockService two = new MockService("two", false, 100);
-    ServiceParent parent = startService(one, two);
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertStopped(two);
-    assert ((WorkflowSequenceService) parent).getPreviousService().equals(two);
-  }
-
-  //@Test
-  public void testCallableChild() throws Throwable {
-
-    MockService one = new MockService("one", false, 100);
-    CallableHandler handler = new CallableHandler("hello");
-    WorkflowCallbackService<String> ens =
-        new WorkflowCallbackService<String>("handler", handler, 100, true);
-    MockService two = new MockService("two", false, 100);
-    ServiceParent parent = startService(one, ens, two);
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertStopped(ens);
-    assertStopped(two);
-    assertTrue(handler.notified);
-    String s = ens.getScheduledFuture().get();
-    assertEquals("hello", s);
-  }
-
-
-  //@Test
-  public void testFailingSequence() throws Throwable {
-    MockService one = new MockService("one", true, 100);
-    MockService two = new MockService("two", false, 100);
-    WorkflowSequenceService parent =
-        (WorkflowSequenceService) startService(one, two);
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertInState(two, Service.STATE.NOTINITED);
-    assertEquals(one, parent.getPreviousService());
-  }
-
-
-  //@Test
-  public void testFailInStartNext() throws Throwable {
-    MockService one = new MockService("one", false, 100);
-    MockService two = new MockService("two", true, 0);
-    MockService three = new MockService("3", false, 0);
-    ServiceParent parent = startService(one, two, three);
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertStopped(two);
-    Throwable failureCause = two.getFailureCause();
-    assertNotNull(failureCause);
-    Throwable parentFailureCause = parent.getFailureCause();
-    assertNotNull(parentFailureCause);
-    assertEquals(parentFailureCause, failureCause);
-    assertInState(three, Service.STATE.NOTINITED);
-  }
-
-  //@Test
-  public void testSequenceInSequence() throws Throwable {
-    MockService one = new MockService("one", false, 100);
-    MockService two = new MockService("two", false, 100);
-    ServiceParent parent = buildService(one, two);
-    ServiceParent outer = startService(parent);
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertStopped(two);
-  }
-
-  //@Test
-  public void testVarargsConstructor() throws Throwable {
-    MockService one = new MockService("one", false, 100);
-    MockService two = new MockService("two", false, 100);
-    ServiceParent parent = new WorkflowSequenceService("test", one, two);
-    parent.init(new Configuration());
-    parent.start();
-    waitForParentToStop(parent);
-    assertStopped(one);
-    assertStopped(two);
-  }
-
-
-  //@Test
-  public void testAddChild() throws Throwable {
-    MockService one = new MockService("one", false, 5000);
-    MockService two = new MockService("two", false, 100);
-    ServiceParent parent = startService(one, two);
-    CallableHandler handler = new CallableHandler("hello");
-    WorkflowCallbackService<String> ens =
-        new WorkflowCallbackService<String>("handler", handler, 100, true);
-    parent.addService(ens);
-    waitForParentToStop(parent, 10000);
-    assertStopped(one);
-    assertStopped(two);
-    assertStopped(ens);
-    assertStopped(two);
-    assertEquals("hello", ens.getScheduledFuture().get());
-  }
-
-  public WorkflowSequenceService buildService(Service... services) {
-    WorkflowSequenceService parent =
-        new WorkflowSequenceService("test", services);
-    parent.init(new Configuration());
-    return parent;
-  }
-
-}


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org