Posted to commits@ambari.apache.org by sw...@apache.org on 2014/12/01 21:03:39 UTC

[11/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
deleted file mode 100644
index 7a45405..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestMemoryApplicationHistoryStore extends
-    ApplicationHistoryStoreTestUtils {
-
-  @Before
-  public void setup() {
-    store = new MemoryApplicationHistoryStore();
-  }
-
-  @Test
-  public void testReadWriteApplicationHistory() throws Exception {
-    // Out of order
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    try {
-      writeApplicationFinishData(appId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains(
-        "is stored before the start information"));
-    }
-    // Normal
-    int numApps = 5;
-    for (int i = 1; i <= numApps; ++i) {
-      appId = ApplicationId.newInstance(0, i);
-      writeApplicationStartData(appId);
-      writeApplicationFinishData(appId);
-    }
-    Assert.assertEquals(numApps, store.getAllApplications().size());
-    for (int i = 1; i <= numApps; ++i) {
-      appId = ApplicationId.newInstance(0, i);
-      ApplicationHistoryData data = store.getApplication(appId);
-      Assert.assertNotNull(data);
-      Assert.assertEquals(appId.toString(), data.getApplicationName());
-      Assert.assertEquals(appId.toString(), data.getDiagnosticsInfo());
-    }
-    // Write again
-    appId = ApplicationId.newInstance(0, 1);
-    try {
-      writeApplicationStartData(appId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-    try {
-      writeApplicationFinishData(appId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-  }
-
-  @Test
-  public void testReadWriteApplicationAttemptHistory() throws Exception {
-    // Out of order
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    try {
-      writeApplicationAttemptFinishData(appAttemptId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains(
-        "is stored before the start information"));
-    }
-    // Normal
-    int numAppAttempts = 5;
-    writeApplicationStartData(appId);
-    for (int i = 1; i <= numAppAttempts; ++i) {
-      appAttemptId = ApplicationAttemptId.newInstance(appId, i);
-      writeApplicationAttemptStartData(appAttemptId);
-      writeApplicationAttemptFinishData(appAttemptId);
-    }
-    Assert.assertEquals(numAppAttempts, store.getApplicationAttempts(appId)
-      .size());
-    for (int i = 1; i <= numAppAttempts; ++i) {
-      appAttemptId = ApplicationAttemptId.newInstance(appId, i);
-      ApplicationAttemptHistoryData data =
-          store.getApplicationAttempt(appAttemptId);
-      Assert.assertNotNull(data);
-      Assert.assertEquals(appAttemptId.toString(), data.getHost());
-      Assert.assertEquals(appAttemptId.toString(), data.getDiagnosticsInfo());
-    }
-    writeApplicationFinishData(appId);
-    // Write again
-    appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
-    try {
-      writeApplicationAttemptStartData(appAttemptId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-    try {
-      writeApplicationAttemptFinishData(appAttemptId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-  }
-
-  @Test
-  public void testReadWriteContainerHistory() throws Exception {
-    // Out of order
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
-    try {
-      writeContainerFinishData(containerId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains(
-        "is stored before the start information"));
-    }
-    // Normal
-    writeApplicationAttemptStartData(appAttemptId);
-    int numContainers = 5;
-    for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newInstance(appAttemptId, i);
-      writeContainerStartData(containerId);
-      writeContainerFinishData(containerId);
-    }
-    Assert
-      .assertEquals(numContainers, store.getContainers(appAttemptId).size());
-    for (int i = 1; i <= numContainers; ++i) {
-      containerId = ContainerId.newInstance(appAttemptId, i);
-      ContainerHistoryData data = store.getContainer(containerId);
-      Assert.assertNotNull(data);
-      Assert.assertEquals(Priority.newInstance(containerId.getId()),
-        data.getPriority());
-      Assert.assertEquals(containerId.toString(), data.getDiagnosticsInfo());
-    }
-    ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
-    Assert.assertNotNull(masterContainer);
-    Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
-      masterContainer.getContainerId());
-    writeApplicationAttemptFinishData(appAttemptId);
-    // Write again
-    containerId = ContainerId.newInstance(appAttemptId, 1);
-    try {
-      writeContainerStartData(containerId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-    try {
-      writeContainerFinishData(containerId);
-      Assert.fail();
-    } catch (IOException e) {
-      Assert.assertTrue(e.getMessage().contains("is already stored"));
-    }
-  }
-
-  @Test
-  public void testMassiveWriteContainerHistory() throws IOException {
-    long mb = 1024 * 1024;
-    Runtime runtime = Runtime.getRuntime();
-    long usedMemoryBefore = (runtime.totalMemory() - runtime.freeMemory()) / mb;
-    int numContainers = 100000;
-    ApplicationId appId = ApplicationId.newInstance(0, 1);
-    ApplicationAttemptId appAttemptId =
-        ApplicationAttemptId.newInstance(appId, 1);
-    for (int i = 1; i <= numContainers; ++i) {
-      ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
-      writeContainerStartData(containerId);
-      writeContainerFinishData(containerId);
-    }
-    long usedMemoryAfter = (runtime.totalMemory() - runtime.freeMemory()) / mb;
-    Assert.assertTrue((usedMemoryAfter - usedMemoryBefore) < 200);
-  }
-
-}
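
Aside: testMassiveWriteContainerHistory above bounds heap growth by sampling
Runtime before and after 100,000 container writes. A self-contained sketch of
that heap-delta probe, with a hypothetical class name and a ballast allocation
standing in for the store writes (readings are approximate, since nothing
forces a GC before sampling):

    public final class HeapDeltaProbe {
      private static final long MB = 1024 * 1024;

      // Approximate used heap in MB, the same formula the deleted test used.
      static long usedMegabytes() {
        Runtime rt = Runtime.getRuntime();
        return (rt.totalMemory() - rt.freeMemory()) / MB;
      }

      public static void main(String[] args) {
        long before = usedMegabytes();
        byte[][] ballast = new byte[100][];        // stands in for the writes
        for (int i = 0; i < ballast.length; i++) {
          ballast[i] = new byte[(int) MB];         // ~100 MB in total
        }
        long after = usedMegabytes();
        // The deleted test asserted a bound: (usedAfter - usedBefore) < 200.
        System.out.println("Heap growth ~" + (after - before) + " MB");
      }
    }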

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestAppMetrics.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestAppMetrics.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestAppMetrics.java
deleted file mode 100644
index 499dab6..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestAppMetrics.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data;
-
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.Json;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestAppMetrics {
-  private static final String SAMPLE_SINGLE_METRIC_HOST_JSON = "{\n" +
-    "  \"metrics\" : [ {\n" +
-    "    \"instanceid\" : \"\",\n" +
-    "    \"hostname\" : \"localhost\",\n" +
-    "    \"metrics\" : {\n" +
-    "      \"0\" : \"5.35\",\n" +
-    "      \"5000\" : \"5.35\",\n" +
-    "      \"10000\" : \"5.35\",\n" +
-    "      \"15000\" : \"5.35\"\n" +
-    "    },\n" +
-    "    \"starttime\" : \"1411663170112\",\n" +
-    "    \"appid\" : \"HOST\",\n" +
-    "    \"metricname\" : \"disk_free\"\n" +
-    "  } ]\n" +
-    "}";
-
-  private static final String SAMPLE_TWO_METRIC_HOST_JSON = "{\n" +
-    "  \"metrics\" : [ {\n" +
-    "    \"instanceid\" : \"\",\n" +
-    "    \"hostname\" : \"localhost\",\n" +
-    "    \"metrics\" : {\n" +
-    "      \"0\" : \"5.35\",\n" +
-    "      \"5000\" : \"5.35\",\n" +
-    "      \"10000\" : \"5.35\",\n" +
-    "      \"15000\" : \"5.35\"\n" +
-    "    },\n" +
-    "    \"starttime\" : \"0\",\n" +
-    "    \"appid\" : \"HOST\",\n" +
-    "    \"metricname\" : \"disk_free\"\n" +
-    "  }, {\n" +
-    "    \"instanceid\" : \"\",\n" +
-    "    \"hostname\" : \"localhost\",\n" +
-    "    \"metrics\" : {\n" +
-    "      \"0\" : \"94.0\",\n" +
-    "      \"5000\" : \"94.0\",\n" +
-    "      \"10000\" : \"94.0\",\n" +
-    "      \"15000\" : \"94.0\"\n" +
-    "    },\n" +
-    "    \"starttime\" : \"0\",\n" +
-    "    \"appid\" : \"HOST\",\n" +
-    "    \"metricname\" : \"mem_cached\"\n" +
-    "  } ]\n" +
-    "}";
-
-  private long[] timestamps;
-
-  @Before
-  public void setUp() throws Exception {
-    timestamps = new long[4];
-    timestamps[0] = 0;
-    timestamps[1] = timestamps[0] + 5000;
-    timestamps[2] = timestamps[1] + 5000;
-    timestamps[3] = timestamps[2] + 5000;
-
-  }
-
-  @Test
-  public void testHostDiskMetricsSerialization() throws IOException {
-    long timestamp = 1411663170112L;
-    AppMetrics appMetrics = new AppMetrics(new ApplicationInstance("localhost", AppID.HOST, ""), timestamp);
-
-    Metric diskFree = appMetrics.createMetric("disk_free");
-    double value = 5.35;
-
-    diskFree.putMetric(timestamps[0], Double.toString(value));
-    diskFree.putMetric(timestamps[1], Double.toString(value));
-    diskFree.putMetric(timestamps[2], Double.toString(value));
-    diskFree.putMetric(timestamps[3], Double.toString(value));
-
-    appMetrics.addMetric(diskFree);
-
-    String expected = SAMPLE_SINGLE_METRIC_HOST_JSON;
-    String s = new Json(true).serialize(appMetrics);
-
-    assertEquals("Serialized Host Metrics", expected, s);
-  }
-
-
-  @Test
-  public void testSingleHostManyMetricsSerialization() throws IOException {
-    AppMetrics appMetrics = new AppMetrics(new ApplicationInstance("localhost", AppID.HOST, ""), timestamps[0]);
-
-    Metric diskFree = appMetrics.createMetric("disk_free");
-    double value = 5.35;
-    diskFree.putMetric(timestamps[0], Double.toString(value));
-    diskFree.putMetric(timestamps[1], Double.toString(value));
-    diskFree.putMetric(timestamps[2], Double.toString(value));
-    diskFree.putMetric(timestamps[3], Double.toString(value));
-
-    appMetrics.addMetric(diskFree);
-
-    Metric memCache = appMetrics.createMetric("mem_cached");
-    double memVal = 94;
-    memCache.putMetric(timestamps[0], Double.toString(memVal));
-    memCache.putMetric(timestamps[1], Double.toString(memVal));
-    memCache.putMetric(timestamps[2], Double.toString(memVal));
-    memCache.putMetric(timestamps[3], Double.toString(memVal));
-
-    appMetrics.addMetric(memCache);
-
-    String expected = SAMPLE_TWO_METRIC_HOST_JSON;
-    String s = new Json(true).serialize(appMetrics);
-
-    assertEquals("Serialized Host Metrics", expected, s);
-  }
-}
\ No newline at end of file
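
Aside: both tests above pin the simulator's JSON output against a golden
string. The same golden-string pattern with plain Jackson, as a sketch
(hypothetical Sample bean; the deleted tests use the project's own Json
wrapper with pretty printing, and field order here is assumed to follow
declaration order, which Jackson does not strictly guarantee):

    import static org.junit.Assert.assertEquals;

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.junit.Test;

    public class GoldenJsonTest {
      static class Sample {                       // hypothetical payload bean
        public String hostname = "localhost";
        public String metricname = "disk_free";
      }

      @Test
      public void serializedFormStaysStable() throws Exception {
        String expected =
            "{\"hostname\":\"localhost\",\"metricname\":\"disk_free\"}";
        assertEquals(expected,
            new ObjectMapper().writeValueAsString(new Sample()));
      }
    }

The brittleness is deliberate: any change to formatting or key order fails
the test, which is why the expected constants above spell out every newline.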

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
deleted file mode 100644
index a0572a2..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data;
-
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.Json;
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.entry;
-import static org.junit.Assert.assertEquals;
-
-public class TestMetric {
-  private static final String SAMPLE_METRIC_IN_JSON = "{\n" +
-    "  \"instanceid\" : \"\",\n" +
-    "  \"hostname\" : \"localhost\",\n" +
-    "  \"metrics\" : {\n" +
-    "    \"0\" : \"5.35\",\n" +
-    "    \"5000\" : \"5.35\",\n" +
-    "    \"10000\" : \"5.35\",\n" +
-    "    \"15000\" : \"5.35\"\n" +
-    "  },\n" +
-    "  \"starttime\" : \"0\",\n" +
-    "  \"appid\" : \"HOST\",\n" +
-    "  \"metricname\" : \"disk_free\"\n" +
-    "}";
-
-  @Test
-  public void testSerializeToJson() throws IOException {
-    Metric diskOnHostMetric = new Metric(new ApplicationInstance("localhost", AppID.HOST, ""), "disk_free", 0);
-
-    long timestamp = 0;
-    double value = 5.35;
-
-    diskOnHostMetric.putMetric(timestamp, Double.toString(value));
-    diskOnHostMetric.putMetric(timestamp + 5000, Double.toString(value));
-    diskOnHostMetric.putMetric(timestamp + 10000, Double.toString(value));
-    diskOnHostMetric.putMetric(timestamp + 15000, Double.toString(value));
-
-    String expected = SAMPLE_METRIC_IN_JSON;
-    String s = new Json(true).serialize(diskOnHostMetric);
-
-    assertEquals("Json should match", expected, s);
-  }
-
-  @Test
-  public void testDeserializeObjectFromString() throws IOException {
-    String source = SAMPLE_METRIC_IN_JSON;
-
-    Metric m = new Json().deserialize(source, Metric.class);
-
-    assertEquals("localhost", m.getHostname());
-    assertEquals("HOST", m.getAppid());
-    assertEquals("", m.getInstanceid());
-    assertEquals("disk_free", m.getMetricname());
-    assertEquals("0", m.getStarttime());
-
-    assertThat(m.getMetrics()).isNotEmpty().hasSize(4).contains(
-      entry("0", "5.35"),
-      entry("5000", "5.35"),
-      entry("10000", "5.35"),
-      entry("15000", "5.35"));
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
deleted file mode 100644
index 4411be5..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net;
-
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.easymock.EasyMock.*;
-import static org.junit.Assert.assertEquals;
-
-public class TestRestMetricsSender {
-
-  @Test
-  public void testPushMetrics() throws Exception {
-    final UrlService svcMock = createStrictMock(UrlService.class);
-    final String payload = "test";
-    final String expectedResponse = "mockResponse";
-
-    expect(svcMock.send(anyString())).andReturn(expectedResponse);
-    svcMock.disconnect();
-    expectLastCall();
-
-    replay(svcMock);
-
-    RestMetricsSender sender = new RestMetricsSender("expectedHostName") {
-      @Override
-      protected UrlService getConnectedUrlService() throws IOException {
-        return svcMock;
-      }
-    };
-    String response = sender.pushMetrics(payload);
-
-    verify(svcMock);
-    assertEquals("", expectedResponse, response);
-  }
-
-  @Test
-  public void testPushMetricsFailed() throws Exception {
-    final UrlService svcMock = createStrictMock(UrlService.class);
-    final String payload = "test";
-    final String expectedResponse = "mockResponse";
-    RestMetricsSender sender = new RestMetricsSender("expectedHostName") {
-      @Override
-      protected UrlService getConnectedUrlService() throws IOException {
-        return svcMock;
-      }
-    };
-
-    expect(svcMock.send(anyString())).andThrow(new IOException());
-    svcMock.disconnect();
-    expectLastCall();
-
-    replay(svcMock);
-
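-    // send() is primed to throw; pushMetrics is expected to handle the
-    // IOException internally, so the call below should not propagate it.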
-    String response = sender.pushMetrics(payload);
-
-    verify(svcMock);
-  }
-}
\ No newline at end of file
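
Aside: both tests inject the mock by subclassing the sender and overriding
its protected getConnectedUrlService() factory method, a common seam for
isolating network code. The pattern reduced to its essentials, under assumed
names (Service, Client):

    import static org.easymock.EasyMock.createStrictMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;
    import static org.easymock.EasyMock.verify;
    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class FactoryMethodSeamTest {
      interface Service { String call(String payload); }

      static class Client {
        // Production code would open a real connection here.
        protected Service connect() { throw new UnsupportedOperationException(); }
        String push(String payload) { return connect().call(payload); }
      }

      @Test
      public void pushGoesThroughInjectedService() {
        final Service mock = createStrictMock(Service.class);
        expect(mock.call("test")).andReturn("ok");
        replay(mock);

        // The subclass override swaps the real connection for the mock.
        Client client = new Client() {
          @Override protected Service connect() { return mock; }
        };

        assertEquals("ok", client.push("test"));
        verify(mock);
      }
    }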

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
deleted file mode 100644
index 7e29ae3..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net;
-
-
-import org.junit.Test;
-
-import java.io.ByteArrayOutputStream;
-import java.io.PrintStream;
-
-public class TestStdOutMetricsSender {
-
-  @Test
-  public void testPushMetrics() throws Exception {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PrintStream out = new PrintStream(baos);
-    StdOutMetricsSender sender = new StdOutMetricsSender("expectedHostName", out);
-    sender.pushMetrics("test");
-
-    System.out.println(baos.toString());
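-    // No assertion here: the test only checks that pushMetrics completes
-    // without throwing; the captured output is echoed for manual inspection.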
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestRandomMetricsProvider.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestRandomMetricsProvider.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestRandomMetricsProvider.java
deleted file mode 100644
index 462aaf0..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestRandomMetricsProvider.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util;
-
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
-
-public class TestRandomMetricsProvider {
-
-  @Test
-  public void testReturnSingle() {
-    double from = 5.25;
-    double to = 5.40;
-    RandomMetricsProvider provider = new RandomMetricsProvider(from, to);
-    double metric = provider.next();
-
-    assertTrue("Generated metric should be in range", from < metric && metric < to);
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestTimeStampProvider.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestTimeStampProvider.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestTimeStampProvider.java
deleted file mode 100644
index dd513aa..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestTimeStampProvider.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util;
-
-import org.junit.Test;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertEquals;
-
-public class TestTimeStampProvider {
-
-  @Test
-  public void testReturnSingle() {
-    long startTime = 1411663170112L;
-    int timeStep = 5000;
-    TimeStampProvider tm = new TimeStampProvider(startTime, timeStep, 0);
-
-    long tStamp = tm.next();
-
-    assertEquals("First generated timestamp should match starttime", startTime, tStamp);
-  }
-
-  @Test
-  public void testReturnTstampsForSendInterval() throws Exception {
-    long startTime = 0;
-    int collectInterval = 5;
-    int sendInterval = 30;
-    TimeStampProvider tsp = new TimeStampProvider(startTime, collectInterval, sendInterval);
-
-    long[] timestamps = tsp.timestampsForNextInterval();
-
-    assertThat(timestamps)
-      .hasSize(6)
-      .containsOnly(0, 5, 10, 15, 20, 25);
-  }
-}
\ No newline at end of file
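
Aside: the second test fixes the contract that one send interval yields
sendInterval / collectInterval timestamps (30 / 5 = 6 here: 0, 5, 10, 15,
20, 25). A plausible reconstruction of that method, assuming the deleted
class keeps a running cursor starting at startTime (a sketch of the implied
contract, not the actual Ambari implementation):

    public class TimeStampProviderSketch {
      private long next;            // running cursor, starts at startTime
      private final int step;       // collect interval, ms
      private final int batchSpan;  // send interval, ms

      TimeStampProviderSketch(long startTime, int collectInterval,
                              int sendInterval) {
        this.next = startTime;
        this.step = collectInterval;
        this.batchSpan = sendInterval;
      }

      // Emits batchSpan / step evenly spaced timestamps, advancing the cursor.
      long[] timestampsForNextInterval() {
        long[] out = new long[batchSpan / step];
        for (int i = 0; i < out.length; i++) {
          out[i] = next;
          next += step;
        }
        return out;
      }
    }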

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
deleted file mode 100644
index 96b8a83..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import com.google.common.collect.Maps;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.phoenix.hbase.index.write.IndexWriterUtils;
-import org.apache.phoenix.query.BaseTest;
-import org.apache.phoenix.query.QueryServices;
-import org.apache.phoenix.util.PropertiesUtil;
-import org.apache.phoenix.util.ReadOnlyProps;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
-import static org.assertj.core.api.Assertions.assertThat;
-
-public abstract class AbstractMiniHBaseClusterTest extends BaseTest {
-
-  protected static final long BATCH_SIZE = 3;
-
-  @BeforeClass
-  public static void doSetup() throws Exception {
-    Map<String, String> props = getDefaultProps();
-    props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(5000));
-    props.put(IndexWriterUtils.HTABLE_THREAD_KEY, Integer.toString(100));
-    // Make a small batch size to test multiple calls to reserve sequences
-    props.put(QueryServices.SEQUENCE_CACHE_SIZE_ATTRIB,
-      Long.toString(BATCH_SIZE));
-    // Must update config before starting server
-    setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
-  }
-
-  @AfterClass
-  public static void doTeardown() throws Exception {
-    dropNonSystemTables();
-  }
-
-  @After
-  public void cleanUpAfterTest() throws Exception {
-    deletePriorTables(HConstants.LATEST_TIMESTAMP, getUrl());
-  }
-
-  public static Map<String, String> getDefaultProps() {
-    Map<String, String> props = new HashMap<String, String>();
-    // Must update config before starting server
-    props.put(QueryServices.STATS_USE_CURRENT_TIME_ATTRIB,
-      Boolean.FALSE.toString());
-    props.put("java.security.krb5.realm", "");
-    props.put("java.security.krb5.kdc", "");
-    return props;
-  }
-
-  protected Connection getConnection(String url) throws SQLException {
-    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-    Connection conn = DriverManager.getConnection(getUrl(), props);
-    return conn;
-  }
-
-  /**
-   * A canary test. Will show if the infrastructure is set up correctly.
-   */
-  @Test
-  public void testClusterOK() throws Exception {
-    Connection conn = getConnection(getUrl());
-    conn.setAutoCommit(true);
-
-    String sampleDDL = "CREATE TABLE TEST_METRICS " +
-      "(TEST_COLUMN VARCHAR " +
-      "CONSTRAINT pk PRIMARY KEY (TEST_COLUMN)) " +
-      "DATA_BLOCK_ENCODING='FAST_DIFF', IMMUTABLE_ROWS=true, " +
-      "TTL=86400, COMPRESSION='NONE' ";
-
-    Statement stmt = conn.createStatement();
-    stmt.executeUpdate(sampleDDL);
-    conn.commit();
-
-    ResultSet rs = stmt.executeQuery(
-      "SELECT COUNT(TEST_COLUMN) FROM TEST_METRICS");
-
-    rs.next();
-    long l = rs.getLong(1);
-    assertThat(l).isGreaterThanOrEqualTo(0);
-
-    stmt.execute("DROP TABLE TEST_METRICS");
-    conn.close();
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractPhoenixConnectionlessTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractPhoenixConnectionlessTest.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractPhoenixConnectionlessTest.java
deleted file mode 100644
index 1430478..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractPhoenixConnectionlessTest.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver;
-import org.apache.phoenix.jdbc.PhoenixTestDriver;
-import org.apache.phoenix.query.BaseTest;
-import org.apache.phoenix.util.PropertiesUtil;
-import org.apache.phoenix.util.ReadOnlyProps;
-import org.apache.phoenix.util.TestUtil;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import static org.apache.phoenix.util.PhoenixRuntime.TENANT_ID_ATTRIB;
-import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-public abstract class AbstractPhoenixConnectionlessTest extends BaseTest {
-
-  protected static String getUrl() {
-    return TestUtil.PHOENIX_CONNECTIONLESS_JDBC_URL;
-  }
-
-  protected static String getUrl(String tenantId) {
-    return getUrl() + ';' + TENANT_ID_ATTRIB + '=' + tenantId;
-  }
-
-  protected static PhoenixTestDriver driver;
-
-  private static void startServer(String url) throws Exception {
-    assertNull(driver);
-    // only load the test driver if we are testing locally - for integration tests, we want to
-    // test on a wider scale
-    if (PhoenixEmbeddedDriver.isTestUrl(url)) {
-      driver = initDriver(ReadOnlyProps.EMPTY_PROPS);
-      assertTrue(DriverManager.getDriver(url) == driver);
-      driver.connect(url, PropertiesUtil.deepCopy(TEST_PROPERTIES));
-    }
-  }
-
-  protected static synchronized PhoenixTestDriver initDriver(ReadOnlyProps props) throws Exception {
-    if (driver == null) {
-      driver = new PhoenixTestDriver(props);
-      DriverManager.registerDriver(driver);
-    }
-    return driver;
-  }
-
-  private String connUrl;
-
-  @Before
-  public void setup() throws Exception {
-    connUrl = getUrl();
-    startServer(connUrl);
-  }
-
-  @Test
-  public void testStorageSystemInitialized() throws Exception {
-    String sampleDDL = "CREATE TABLE TEST_METRICS (TEST_COLUMN VARCHAR " +
-      "CONSTRAINT pk PRIMARY KEY (TEST_COLUMN)) DATA_BLOCK_ENCODING='FAST_DIFF', " +
-      "IMMUTABLE_ROWS=true, TTL=86400, COMPRESSION='SNAPPY'";
-
-    Connection conn = null;
-    PreparedStatement stmt = null;
-    try {
-      conn = DriverManager.getConnection(connUrl);
-      stmt = conn.prepareStatement(sampleDDL);
-      stmt.execute();
-      conn.commit();
-    } finally {
-      if (stmt != null) {
-        stmt.close();
-      }
-      if (conn != null) {
-        conn.close();
-      }
-    }
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    if (driver != null) {
-      try {
-        driver.close();
-      } finally {
-        PhoenixTestDriver phoenixTestDriver = driver;
-        driver = null;
-        DriverManager.deregisterDriver(phoenixTestDriver);
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITClusterAggregator.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITClusterAggregator.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITClusterAggregator.java
deleted file mode 100644
index f7e53f5..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITClusterAggregator.java
+++ /dev/null
@@ -1,384 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.fail;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.GET_CLUSTER_AGGREGATE_SQL;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.LOG;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.NATIVE_TIME_RANGE_DELTA;
-
-public class ITClusterAggregator extends AbstractMiniHBaseClusterTest {
-  private Connection conn;
-  private PhoenixHBaseAccessor hdb;
-
-  @Before
-  public void setUp() throws Exception {
-    hdb = createTestableHBaseAccessor();
-    // inits connection, starts mini cluster
-    conn = getConnection(getUrl());
-
-    hdb.initMetricSchema();
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    Connection conn = getConnection(getUrl());
-    Statement stmt = conn.createStatement();
-
-    stmt.execute("delete from METRIC_AGGREGATE");
-    stmt.execute("delete from METRIC_AGGREGATE_HOURLY");
-    stmt.execute("delete from METRIC_RECORD");
-    stmt.execute("delete from METRIC_RECORD_HOURLY");
-    stmt.execute("delete from METRIC_RECORD_MINUTE");
-    conn.commit();
-
-    stmt.close();
-    conn.close();
-  }
-
-  @Test
-  public void testShouldAggregateClusterProperly() throws Exception {
-    // GIVEN
-    TimelineMetricClusterAggregator agg =
-      new TimelineMetricClusterAggregator(hdb, new Configuration());
-
-    long startTime = System.currentTimeMillis();
-    long ctime = startTime;
-    long minute = 60 * 1000;
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
-      "disk_free", 1));
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
-      "disk_free", 2));
-    ctime += minute;
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
-      "disk_free", 2));
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
-      "disk_free", 1));
-
-    // WHEN
-    long endTime = ctime + minute;
-    boolean success = agg.doWork(startTime, endTime);
-
-    //THEN
-    Condition condition = new Condition(null, null, null, null, startTime,
-      endTime, null, true);
-    condition.setStatement(String.format(GET_CLUSTER_AGGREGATE_SQL,
-      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA)));
-
-    PreparedStatement pstmt =
-      PhoenixTransactSQL.prepareGetMetricsSqlStmt(conn, condition);
-    ResultSet rs = pstmt.executeQuery();
-
-    int recordCount = 0;
-    while (rs.next()) {
-      TimelineClusterMetric currentMetric =
-        PhoenixHBaseAccessor.getTimelineMetricClusterKeyFromResultSet(rs);
-      MetricClusterAggregate currentHostAggregate =
-        PhoenixHBaseAccessor.getMetricClusterAggregateFromResultSet(rs);
-
-      if ("disk_free".equals(currentMetric.getMetricName())) {
-        assertEquals(2, currentHostAggregate.getNumberOfHosts());
-        assertEquals(2.0, currentHostAggregate.getMax());
-        assertEquals(1.0, currentHostAggregate.getMin());
-        assertEquals(3.0, currentHostAggregate.getSum());
-        recordCount++;
-      } else {
-        fail("Unexpected entry");
-      }
-    }
-  }
-
-
-  @Test
-  public void testShouldAggregateDifferentMetricsOnClusterProperly()
-    throws Exception {
-    // GIVEN
-    TimelineMetricClusterAggregator agg =
-      new TimelineMetricClusterAggregator(hdb, new Configuration());
-
-    // here we put some metrics that will be aggregated
-    long startTime = System.currentTimeMillis();
-    long ctime = startTime;
-    long minute = 60 * 1000;
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
-      "disk_free", 1));
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
-      "disk_free", 2));
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
-      "disk_used", 1));
-
-    ctime += minute;
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
-      "disk_free", 2));
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local2",
-      "disk_free", 1));
-    hdb.insertMetricRecords(prepareSingleTimelineMetric(ctime, "local1",
-      "disk_used", 1));
-
-    // WHEN
-    long endTime = ctime + minute;
-    boolean success = agg.doWork(startTime, endTime);
-
-    //THEN
-    Condition condition = new Condition(null, null, null, null, startTime,
-      endTime, null, true);
-    condition.setStatement(String.format(GET_CLUSTER_AGGREGATE_SQL,
-      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA)));
-
-    PreparedStatement pstmt =
-      PhoenixTransactSQL.prepareGetMetricsSqlStmt(conn, condition);
-    ResultSet rs = pstmt.executeQuery();
-
-    int recordCount = 0;
-    while (rs.next()) {
-      TimelineClusterMetric currentMetric =
-        PhoenixHBaseAccessor.getTimelineMetricClusterKeyFromResultSet(rs);
-      MetricClusterAggregate currentHostAggregate =
-        PhoenixHBaseAccessor.getMetricClusterAggregateFromResultSet(rs);
-
-      if ("disk_free".equals(currentMetric.getMetricName())) {
-        assertEquals(2, currentHostAggregate.getNumberOfHosts());
-        assertEquals(2.0, currentHostAggregate.getMax());
-        assertEquals(1.0, currentHostAggregate.getMin());
-        assertEquals(3.0, currentHostAggregate.getSum());
-        recordCount++;
-      } else if ("disk_used".equals(currentMetric.getMetricName())) {
-        assertEquals(1, currentHostAggregate.getNumberOfHosts());
-        assertEquals(1.0, currentHostAggregate.getMax());
-        assertEquals(1.0, currentHostAggregate.getMin());
-        assertEquals(1.0, currentHostAggregate.getSum());
-        recordCount++;
-      } else {
-        fail("Unexpected entry");
-      }
-    }
-  }
-
-
-  @Test
-  public void testShouldAggregateClusterOnHourProperly() throws Exception {
-    // GIVEN
-    TimelineMetricClusterAggregatorHourly agg =
-      new TimelineMetricClusterAggregatorHourly(hdb, new Configuration());
-
-    // this time could be virtualized or made independent of the real clock
-    long startTime = System.currentTimeMillis();
-    long ctime = startTime;
-    long minute = 60 * 1000;
-
-    Map<TimelineClusterMetric, MetricClusterAggregate> records =
-      new HashMap<TimelineClusterMetric, MetricClusterAggregate>();
-
-    records.put(createEmptyTimelineMetric(ctime),
-      new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric(ctime += minute),
-      new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric(ctime += minute),
-      new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric(ctime += minute),
-      new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-
-    hdb.saveClusterAggregateRecords(records);
-
-    // WHEN
-    agg.doWork(startTime, ctime + minute);
-
-    // THEN
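-    // Four records were saved, each with sum 4.0 across 2 hosts, so the
-    // hourly rollup should read METRIC_SUM = 4 * 4.0 = 16.0 and
-    // METRIC_COUNT = 4 * 2 = 8.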
-    ResultSet rs = executeQuery("SELECT * FROM METRIC_AGGREGATE_HOURLY");
-    int count = 0;
-    while (rs.next()) {
-      assertEquals("METRIC_NAME", "disk_used", rs.getString("METRIC_NAME"));
-      assertEquals("APP_ID", "test_app", rs.getString("APP_ID"));
-      assertEquals("METRIC_SUM", 16.0, rs.getDouble("METRIC_SUM"));
-      assertEquals("METRIC_COUNT", 8, rs.getLong("METRIC_COUNT"));
-      assertEquals("METRIC_MAX", 4.0, rs.getDouble("METRIC_MAX"));
-      assertEquals("METRIC_MIN", 0.0, rs.getDouble("METRIC_MIN"));
-      count++;
-    }
-
-    assertEquals("One hourly aggregated row expected ", 1, count);
-  }
-
-  @Test
-  public void testShouldAggregateDifferentMetricsOnHourProperly() throws
-    Exception {
-    // GIVEN
-    TimelineMetricClusterAggregatorHourly agg =
-      new TimelineMetricClusterAggregatorHourly(hdb, new Configuration());
-
-    long startTime = System.currentTimeMillis();
-    long ctime = startTime;
-    long minute = 60 * 1000;
-
-    Map<TimelineClusterMetric, MetricClusterAggregate> records =
-      new HashMap<TimelineClusterMetric, MetricClusterAggregate>();
-
-    records.put(createEmptyTimelineMetric("disk_used", ctime),
-      new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric("disk_free", ctime),
-      new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));
-
-    records.put(createEmptyTimelineMetric("disk_used", ctime += minute),
-      new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric("disk_free", ctime),
-      new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));
-
-    records.put(createEmptyTimelineMetric("disk_used", ctime += minute),
-      new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric("disk_free", ctime),
-      new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));
-
-    records.put(createEmptyTimelineMetric("disk_used", ctime += minute),
-      new MetricClusterAggregate(4.0, 2, 0.0, 4.0, 0.0));
-    records.put(createEmptyTimelineMetric("disk_free", ctime),
-      new MetricClusterAggregate(1.0, 2, 0.0, 1.0, 1.0));
-
-    hdb.saveClusterAggregateRecords(records);
-
-    // WHEN
-    agg.doWork(startTime, ctime + minute);
-
-    // THEN
-    ResultSet rs = executeQuery("SELECT * FROM METRIC_AGGREGATE_HOURLY");
-    int count = 0;
-    while (rs.next()) {
-      if ("disk_used".equals(rs.getString("METRIC_NAME"))) {
-        assertEquals("APP_ID", "test_app", rs.getString("APP_ID"));
-        assertEquals("METRIC_SUM", 16.0, rs.getDouble("METRIC_SUM"));
-        assertEquals("METRIC_COUNT", 8, rs.getLong("METRIC_COUNT"));
-        assertEquals("METRIC_MAX", 4.0, rs.getDouble("METRIC_MAX"));
-        assertEquals("METRIC_MIN", 0.0, rs.getDouble("METRIC_MIN"));
-      } else if ("disk_free".equals(rs.getString("METRIC_NAME"))) {
-        assertEquals("APP_ID", "test_app", rs.getString("APP_ID"));
-        assertEquals("METRIC_SUM", 4.0, rs.getDouble("METRIC_SUM"));
-        assertEquals("METRIC_COUNT", 8, rs.getLong("METRIC_COUNT"));
-        assertEquals("METRIC_MAX", 1.0, rs.getDouble("METRIC_MAX"));
-        assertEquals("METRIC_MIN", 1.0, rs.getDouble("METRIC_MIN"));
-      }
-
-      count++;
-    }
-
-    assertEquals("Two hourly aggregated row expected ", 2, count);
-  }
-
-  private ResultSet executeQuery(String query) throws SQLException {
-    Connection conn = getConnection(getUrl());
-    Statement stmt = conn.createStatement();
-    return stmt.executeQuery(query);
-  }
-
-  private TimelineClusterMetric createEmptyTimelineMetric(String name,
-                                                          long startTime) {
-    TimelineClusterMetric metric = new TimelineClusterMetric(name,
-      "test_app", null, startTime, null);
-
-    return metric;
-  }
-
-  private TimelineClusterMetric createEmptyTimelineMetric(long startTime) {
-    return createEmptyTimelineMetric("disk_used", startTime);
-  }
-
-  private MetricHostAggregate
-  createMetricHostAggregate(double max, double min, int numberOfSamples,
-                            double sum) {
-    MetricHostAggregate expectedAggregate =
-      new MetricHostAggregate();
-    expectedAggregate.setMax(max);
-    expectedAggregate.setMin(min);
-    expectedAggregate.setNumberOfSamples(numberOfSamples);
-    expectedAggregate.setSum(sum);
-
-    return expectedAggregate;
-  }
-
-  private PhoenixHBaseAccessor createTestableHBaseAccessor() {
-    Configuration metricsConf = new Configuration();
-    metricsConf.set(
-      TimelineMetricConfiguration.HBASE_COMPRESSION_SCHEME, "NONE");
-
-    return
-      new PhoenixHBaseAccessor(
-        new Configuration(),
-        metricsConf,
-        new ConnectionProvider() {
-          @Override
-          public Connection getConnection() {
-            Connection connection = null;
-            try {
-              connection = DriverManager.getConnection(getUrl());
-            } catch (SQLException e) {
-              LOG.warn("Unable to connect to HBase store using Phoenix.", e);
-            }
-            return connection;
-          }
-        });
-  }
-
-  private TimelineMetrics prepareSingleTimelineMetric(long startTime,
-                                                      String host,
-                                                      String metricName,
-                                                      double val) {
-    TimelineMetrics m = new TimelineMetrics();
-    m.setMetrics(Arrays.asList(
-      createTimelineMetric(startTime, metricName, host, val)));
-
-    return m;
-  }
-
-  private TimelineMetric createTimelineMetric(long startTime,
-                                              String metricName,
-                                              String host,
-                                              double val) {
-    TimelineMetric m = new TimelineMetric();
-    m.setAppId("host");
-    m.setHostName(host);
-    m.setMetricName(metricName);
-    m.setStartTime(startTime);
-    Map<Long, Double> vals = new HashMap<Long, Double>();
-    vals.put(startTime + 15000L, val);
-    vals.put(startTime + 30000L, val);
-    vals.put(startTime + 45000L, val);
-    vals.put(startTime + 60000L, val);
-
-    m.setMetricValues(vals);
-
-    return m;
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITMetricAggregator.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITMetricAggregator.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITMetricAggregator.java
deleted file mode 100644
index d166a22..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITMetricAggregator.java
+++ /dev/null
@@ -1,310 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.Map;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static junit.framework.Assert.fail;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.GET_METRIC_AGGREGATE_ONLY_SQL;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.LOG;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.NATIVE_TIME_RANGE_DELTA;
-import static org.assertj.core.api.Assertions.assertThat;
-
-public class ITMetricAggregator extends AbstractMiniHBaseClusterTest {
-  private Connection conn;
-  private PhoenixHBaseAccessor hdb;
-
-  @Before
-  public void setUp() throws Exception {
-    hdb = createTestableHBaseAccessor();
-    // inits connection, starts mini cluster
-    conn = getConnection(getUrl());
-
-    hdb.initMetricSchema();
-  }
-
-  @After
-  public void tearDown() throws Exception {
-    Connection conn = getConnection(getUrl());
-    Statement stmt = conn.createStatement();
-
-    stmt.execute("delete from METRIC_AGGREGATE");
-    stmt.execute("delete from METRIC_AGGREGATE_HOURLY");
-    stmt.execute("delete from METRIC_RECORD");
-    stmt.execute("delete from METRIC_RECORD_HOURLY");
-    stmt.execute("delete from METRIC_RECORD_MINUTE");
-    conn.commit();
-
-    stmt.close();
-    conn.close();
-  }
-
-  @Test
-  public void testShouldInsertMetrics() throws Exception {
-    // GIVEN
-
-    // WHEN
-    long startTime = System.currentTimeMillis();
-    TimelineMetrics metricsSent = prepareTimelineMetrics(startTime, "local");
-    hdb.insertMetricRecords(metricsSent);
-
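-    // null metric names / appId / instanceId leave those predicates out:
-    // read back everything stored for host "local" in the 15-minute window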
-    Condition queryCondition = new Condition(null, "local", null, null,
-      startTime, startTime + (15 * 60 * 1000), null, false);
-    TimelineMetrics recordRead = hdb.getMetricRecords(queryCondition);
-
-    // THEN
-    assertThat(recordRead.getMetrics()).hasSize(2)
-      .extracting("metricName")
-      .containsOnly("mem_free", "disk_free");
-
-    assertThat(metricsSent.getMetrics())
-      .usingElementComparator(TIME_IGNORING_COMPARATOR)
-      .containsExactlyElementsOf(recordRead.getMetrics());
-  }
-
-  @Test
-  public void testShouldAggregateMinuteProperly() throws Exception {
-    // GIVEN
-    TimelineMetricAggregator aggregatorMinute = TimelineMetricAggregatorFactory
-      .createTimelineMetricAggregatorMinute(hdb, new Configuration());
-
-    long startTime = System.currentTimeMillis();
-    long ctime = startTime;
-    long minute = 60 * 1000;
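-    // write five batches of raw metrics, one minute apart, so the whole
-    // aggregation window has data to roll up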
-    hdb.insertMetricRecords(prepareTimelineMetrics(startTime, "local"));
-    hdb.insertMetricRecords(prepareTimelineMetrics(ctime += minute, "local"));
-    hdb.insertMetricRecords(prepareTimelineMetrics(ctime += minute, "local"));
-    hdb.insertMetricRecords(prepareTimelineMetrics(ctime += minute, "local"));
-    hdb.insertMetricRecords(prepareTimelineMetrics(ctime += minute, "local"));
-
-    // WHEN
-    long endTime = startTime + 1000 * 60 * 4;
-    boolean success = aggregatorMinute.doWork(startTime, endTime);
-    assertTrue(success);
-
-    //THEN
-    Condition condition = new Condition(null, null, null, null, startTime,
-      endTime, null, true);
-    condition.setStatement(String.format(GET_METRIC_AGGREGATE_ONLY_SQL,
-      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
-      METRICS_AGGREGATE_MINUTE_TABLE_NAME));
-
-    PreparedStatement pstmt =
-      PhoenixTransactSQL.prepareGetMetricsSqlStmt(conn, condition);
-    ResultSet rs = pstmt.executeQuery();
-    MetricHostAggregate expectedAggregate =
-      createMetricHostAggregate(2.0, 0.0, 20, 15.0);
-
-    int count = 0;
-    while (rs.next()) {
-      TimelineMetric currentMetric =
-        PhoenixHBaseAccessor.getTimelineMetricKeyFromResultSet(rs);
-      MetricHostAggregate currentHostAggregate =
-        PhoenixHBaseAccessor.getMetricHostAggregateFromResultSet(rs);
-
-      if ("disk_free".equals(currentMetric.getMetricName())) {
-        assertEquals(2.0, currentHostAggregate.getMax());
-        assertEquals(0.0, currentHostAggregate.getMin());
-        assertEquals(20, currentHostAggregate.getNumberOfSamples());
-        assertEquals(15.0, currentHostAggregate.getSum());
-        assertEquals(15.0 / 20, currentHostAggregate.getAvg());
-        count++;
-      } else if ("mem_free".equals(currentMetric.getMetricName())) {
-        assertEquals(2.0, currentHostAggregate.getMax());
-        assertEquals(0.0, currentHostAggregate.getMin());
-        assertEquals(20, currentHostAggregate.getNumberOfSamples());
-        assertEquals(15.0, currentHostAggregate.getSum());
-        assertEquals(15.0 / 20, currentHostAggregate.getAvg());
-        count++;
-      } else {
-        fail("Unexpected entry");
-      }
-    }
-    assertEquals("Two aggregated entries expected", 2, count);
-  }
-
-  @Test
-  public void testShouldAggregateHourProperly() throws Exception {
-    // GIVEN
-    TimelineMetricAggregator aggregator = TimelineMetricAggregatorFactory
-      .createTimelineMetricAggregatorHourly(hdb, new Configuration());
-    long startTime = System.currentTimeMillis();
-
-    MetricHostAggregate expectedAggregate =
-      createMetricHostAggregate(2.0, 0.0, 20, 15.0);
-    Map<TimelineMetric, MetricHostAggregate> aggMap =
-      new HashMap<TimelineMetric, MetricHostAggregate>();
-
-    int min_5 = 5 * 60 * 1000;
-    long ctime = startTime - min_5;
-    // twelve 5-minute aggregates fill the one-hour window being rolled up
-    for (int i = 0; i < 12; i++) {
-      aggMap.put(createEmptyTimelineMetric(ctime += min_5), expectedAggregate);
-    }
-
-    hdb.saveHostAggregateRecords(aggMap, METRICS_AGGREGATE_MINUTE_TABLE_NAME);
-
-    //WHEN
-    long endTime = ctime + min_5;
-    boolean success = aggregator.doWork(startTime, endTime);
-    assertTrue(success);
-
-    //THEN
-    Condition condition = new Condition(null, null, null, null, startTime,
-      endTime, null, true);
-    condition.setStatement(String.format(GET_METRIC_AGGREGATE_ONLY_SQL,
-      PhoenixTransactSQL.getNaiveTimeRangeHint(startTime, NATIVE_TIME_RANGE_DELTA),
-      METRICS_AGGREGATE_HOURLY_TABLE_NAME));
-
-    PreparedStatement pstmt =
-      PhoenixTransactSQL.prepareGetMetricsSqlStmt(conn, condition);
-    ResultSet rs = pstmt.executeQuery();
-
-    int count = 0;
-    while (rs.next()) {
-      TimelineMetric currentMetric =
-        PhoenixHBaseAccessor.getTimelineMetricKeyFromResultSet(rs);
-      MetricHostAggregate currentHostAggregate =
-        PhoenixHBaseAccessor.getMetricHostAggregateFromResultSet(rs);
-
-      if ("disk_used".equals(currentMetric.getMetricName())) {
-        assertEquals(2.0, currentHostAggregate.getMax());
-        assertEquals(0.0, currentHostAggregate.getMin());
-        assertEquals(12 * 20, currentHostAggregate.getNumberOfSamples());
-        assertEquals(12 * 15.0, currentHostAggregate.getSum());
-        assertEquals(15.0 / 20, currentHostAggregate.getAvg());
-        count++;
-      }
-    }
-    assertEquals("One hourly aggregate for disk_used expected", 1, count);
-  }
-
-  private TimelineMetric createEmptyTimelineMetric(long startTime) {
-    TimelineMetric metric = new TimelineMetric();
-    metric.setMetricName("disk_used");
-    metric.setAppId("test_app");
-    metric.setHostName("test_host");
-    metric.setTimestamp(startTime);
-
-    return metric;
-  }
-
-  private MetricHostAggregate createMetricHostAggregate(
-      double max, double min, int numberOfSamples, double sum) {
-    MetricHostAggregate expectedAggregate =
-      new MetricHostAggregate();
-    expectedAggregate.setMax(max);
-    expectedAggregate.setMin(min);
-    expectedAggregate.setNumberOfSamples(numberOfSamples);
-    expectedAggregate.setSum(sum);
-
-    return expectedAggregate;
-  }
-
-  private PhoenixHBaseAccessor createTestableHBaseAccessor() {
-    Configuration metricsConf = new Configuration();
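-    // create the metric tables uncompressed for the test run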
-    metricsConf.set(
-      TimelineMetricConfiguration.HBASE_COMPRESSION_SCHEME, "NONE");
-
-    return new PhoenixHBaseAccessor(
-        new Configuration(),
-        metricsConf,
-        new ConnectionProvider() {
-          @Override
-          public Connection getConnection() {
-            Connection connection = null;
-            try {
-              connection = DriverManager.getConnection(getUrl());
-            } catch (SQLException e) {
-              LOG.warn("Unable to connect to HBase store using Phoenix.", e);
-            }
-            return connection;
-          }
-        });
-  }
-
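-  // treats metrics as equal when every field except the timestamps matches;
-  // only distinguishes equal (0) from not-equal (1), which is sufficient
-  // for assertj's containsExactlyElementsOf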
-  private final static Comparator<TimelineMetric> TIME_IGNORING_COMPARATOR =
-    new Comparator<TimelineMetric>() {
-      @Override
-      public int compare(TimelineMetric o1, TimelineMetric o2) {
-        return o1.equalsExceptTime(o2) ? 0 : 1;
-      }
-    };
-
-  private TimelineMetrics prepareTimelineMetrics(long startTime, String host) {
-    TimelineMetrics metrics = new TimelineMetrics();
-    metrics.setMetrics(Arrays.asList(
-      createMetric(startTime, "disk_free", host),
-      createMetric(startTime, "mem_free", host)));
-
-    return metrics;
-  }
-
-  private TimelineMetric createMetric(long startTime,
-                                      String metricName,
-                                      String host) {
-    TimelineMetric m = new TimelineMetric();
-    m.setAppId("host");
-    m.setHostName(host);
-    m.setMetricName(metricName);
-    m.setStartTime(startTime);
-    Map<Long, Double> vals = new HashMap<Long, Double>();
-    vals.put(startTime + 15000L, 0.0);
-    vals.put(startTime + 30000L, 0.0);
-    vals.put(startTime + 45000L, 1.0);
-    vals.put(startTime + 60000L, 2.0);
-
-    m.setMetricValues(vals);
-
-    return m;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestClusterSuite.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestClusterSuite.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestClusterSuite.java
deleted file mode 100644
index 0722ccd..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestClusterSuite.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import org.junit.runner.RunWith;
-import org.junit.runners.Suite;
-
-import static org.junit.runners.Suite.SuiteClasses;
-
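-// groups the metric and cluster aggregator integration tests into one suite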
-@RunWith(Suite.class)
-@SuiteClasses({ITMetricAggregator.class, ITClusterAggregator.class})
-public class TestClusterSuite {
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestMetricHostAggregate.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestMetricHostAggregate.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestMetricHostAggregate.java
deleted file mode 100644
index 5d8ba96..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestMetricHostAggregate.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import org.junit.Test;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-public class TestMetricHostAggregate {
-
-  @Test
-  public void testCreateAggregate() throws Exception {
-    // given
-    MetricHostAggregate aggregate = createAggregate(3.0, 1.0, 2.0, 2);
-
-    //then
-    assertThat(aggregate.getSum()).isEqualTo(3.0);
-    assertThat(aggregate.getMin()).isEqualTo(1.0);
-    assertThat(aggregate.getMax()).isEqualTo(2.0);
-    assertThat(aggregate.getAvg()).isEqualTo(3.0 / 2);
-  }
-
-  @Test
-  public void testUpdateAggregates() throws Exception {
-    // given
-    MetricHostAggregate aggregate = createAggregate(3.0, 1.0, 2.0, 2);
-
-    //when
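-    // merging folds min/max and accumulates sum and sample count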
-    aggregate.updateAggregates(createAggregate(8.0, 0.5, 7.5, 2));
-    aggregate.updateAggregates(createAggregate(1.0, 1.0, 1.0, 1));
-
-    //then
-    assertThat(aggregate.getSum()).isEqualTo(12.0);
-    assertThat(aggregate.getMin()).isEqualTo(0.5);
-    assertThat(aggregate.getMax()).isEqualTo(7.5);
-    assertThat(aggregate.getAvg()).isEqualTo((3.0 + 8.0 + 1.0) / 5);
-  }
-
-  private MetricHostAggregate createAggregate(
-      double sum, double min, double max, int samplesCount) {
-    MetricHostAggregate aggregate = new MetricHostAggregate();
-    aggregate.setSum(sum);
-    aggregate.setMax(max);
-    aggregate.setMin(min);
-    aggregate.setDeviation(0.0);
-    aggregate.setNumberOfSamples(samplesCount);
-    return aggregate;
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
deleted file mode 100644
index 758f5a9..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.util.Arrays;
-
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
-
-public class TestPhoenixTransactSQL {
-  @Test
-  public void testConditionClause() throws Exception {
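-    // Condition(metricNames, hostname, appId, instanceId, startTime,
-    // endTime, limit, grouped): each non-null argument adds a predicate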
-    Condition condition = new Condition(
-      Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
-        1407959718L, 1407959918L, null, false);
-
-    String preparedClause = condition.getConditionClause();
-    String expectedClause = "METRIC_NAME IN (?, ?) AND HOSTNAME = ? AND " +
-      "APP_ID = ? AND INSTANCE_ID = ? AND SERVER_TIME >= ? AND SERVER_TIME < ?";
-
-    Assert.assertNotNull(preparedClause);
-    Assert.assertEquals(expectedClause, preparedClause);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
deleted file mode 100644
index c893314..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
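-/**
- * In-memory {@link TimelineMetricStore} stub that serves two hard-coded
- * metrics (cpu_user and mem_free) and accepts writes without storing them.
- */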
-public class TestTimelineMetricStore implements TimelineMetricStore {
-  @Override
-  public TimelineMetrics getTimelineMetrics(List<String> metricNames,
-      String hostname, String applicationId, String instanceId, Long startTime,
-      Long endTime, Integer limit, boolean groupedByHost) throws SQLException,
-    IOException {
-    TimelineMetrics timelineMetrics = new TimelineMetrics();
-    List<TimelineMetric> metricList = new ArrayList<TimelineMetric>();
-    timelineMetrics.setMetrics(metricList);
-    TimelineMetric metric1 = new TimelineMetric();
-    TimelineMetric metric2 = new TimelineMetric();
-    metricList.add(metric1);
-    metricList.add(metric2);
-    metric1.setMetricName("cpu_user");
-    metric1.setAppId("1");
-    metric1.setInstanceId(null);
-    metric1.setHostName("c6401");
-    metric1.setStartTime(1407949812L);
-    metric1.setMetricValues(new HashMap<Long, Double>() {{
-      put(1407949812L, 1.0d);
-      put(1407949912L, 1.8d);
-      put(1407950002L, 0.7d);
-    }});
-
-    metric2.setMetricName("mem_free");
-    metric2.setAppId("2");
-    metric2.setInstanceId("3");
-    metric2.setHostName("c6401");
-    metric2.setStartTime(1407949812L);
-    metric2.setMetricValues(new HashMap<Long, Double>() {{
-      put(1407949812L, 2.5d);
-      put(1407949912L, 3.0d);
-      put(1407950002L, 0.9d);
-    }});
-
-    return timelineMetrics;
-  }
-
-  @Override
-  public TimelineMetric getTimelineMetric(String metricName, String hostname,
-      String applicationId, String instanceId, Long startTime, Long endTime,
-      Integer limit) throws SQLException, IOException {
-
-    return null;
-  }
-
-  @Override
-  public TimelinePutResponse putMetrics(TimelineMetrics metrics)
-      throws SQLException, IOException {
-
-    return new TimelinePutResponse();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
deleted file mode 100644
index d684a27..0000000
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.WritableComparator;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class TestGenericObjectMapper {
-
-  @Test
-  public void testEncoding() {
-    testEncoding(Long.MAX_VALUE);
-    testEncoding(Long.MIN_VALUE);
-    testEncoding(0L);
-    testEncoding(128L);
-    testEncoding(256L);
-    testEncoding(512L);
-    testEncoding(-256L);
-  }
-
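-  // round-trips a long through the reverse-ordered encoding and checks that
-  // decrementing the value yields byte-wise *greater* bytes (descending order)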
-  private static void testEncoding(long l) {
-    byte[] b = GenericObjectMapper.writeReverseOrderedLong(l);
-    assertEquals("error decoding", l,
-        GenericObjectMapper.readReverseOrderedLong(b, 0));
-    byte[] buf = new byte[16];
-    System.arraycopy(b, 0, buf, 5, 8);
-    assertEquals("error decoding at offset", l,
-        GenericObjectMapper.readReverseOrderedLong(buf, 5));
-    if (l > Long.MIN_VALUE) {
-      byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1);
-      assertEquals("error preserving ordering", 1,
-          WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length));
-    }
-    if (l < Long.MAX_VALUE) {
-      byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1);
-      assertEquals("error preserving ordering", 1,
-          WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length));
-    }
-  }
-
-  private static void verify(Object o) throws IOException {
-    assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o)));
-  }
-
-  @Test
-  public void testValueTypes() throws IOException {
-    verify(Integer.MAX_VALUE);
-    verify(Integer.MIN_VALUE);
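-    // long values that fit in an int are expected to read back as Integer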
-    assertEquals(Integer.MAX_VALUE, GenericObjectMapper.read(
-        GenericObjectMapper.write((long) Integer.MAX_VALUE)));
-    assertEquals(Integer.MIN_VALUE, GenericObjectMapper.read(
-        GenericObjectMapper.write((long) Integer.MIN_VALUE)));
-    verify((long) Integer.MAX_VALUE + 1L);
-    verify((long) Integer.MIN_VALUE - 1L);
-
-    verify(Long.MAX_VALUE);
-    verify(Long.MIN_VALUE);
-
-    assertEquals(42, GenericObjectMapper.read(GenericObjectMapper.write(42L)));
-    verify(42);
-    verify(1.23);
-    verify("abc");
-    verify(true);
-    List<String> list = new ArrayList<String>();
-    list.add("123");
-    list.add("abc");
-    verify(list);
-    Map<String,String> map = new HashMap<String,String>();
-    map.put("k1","v1");
-    map.put("k2","v2");
-    verify(map);
-  }
-
-}