Posted to commits@ambari.apache.org by av...@apache.org on 2018/04/01 19:14:27 UTC
[ambari] 31/39: AMBARI-22688. Fix AMS compilation issues and unit test with hbase, hadoop and phoenix upgraded. (swagle)
This is an automated email from the ASF dual-hosted git repository.
avijayan pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git
commit 61772212a0c9502e95c06d957d9df9326178b2d5
Author: Siddharth Wagle <sw...@hortonworks.com>
AuthorDate: Thu Dec 21 15:56:53 2017 -0800
AMBARI-22688. Fix AMS compilation issues and unit test with hbase, hadoop and phoenix upgraded. (swagle)
---
.../conf/unix/ambari-metrics-collector | 6 +-
.../ambari-metrics-timelineservice/pom.xml | 6 +-
...istoryServer.java => AMSApplicationServer.java} | 111 +-
.../ApplicationHistoryClientService.java | 215 ---
.../ApplicationHistoryManager.java | 146 --
.../ApplicationHistoryManagerImpl.java | 250 ----
.../ApplicationHistoryReader.java | 117 --
.../ApplicationHistoryStore.java | 37 -
.../ApplicationHistoryWriter.java | 112 --
.../FileSystemApplicationHistoryStore.java | 784 -----------
.../MemoryApplicationHistoryStore.java | 274 ----
.../NullApplicationHistoryStore.java | 127 --
.../metrics/timeline/PhoenixHBaseAccessor.java | 141 +-
.../timeline/TimelineMetricConfiguration.java | 3 -
.../timeline/query/PhoenixConnectionProvider.java | 3 +-
.../metrics/timeline/query/PhoenixTransactSQL.java | 3 +
.../timeline/EntityIdentifier.java | 100 --
.../timeline/LeveldbTimelineStore.java | 1473 --------------------
.../timeline/MemoryTimelineStore.java | 360 -----
.../webapp/AHSController.java | 55 -
.../webapp/AHSLogsPage.java | 55 -
.../applicationhistoryservice/webapp/AHSView.java | 90 --
.../webapp/AHSWebApp.java | 66 -
.../webapp/AHSWebServices.java | 162 ---
.../AMSController.java} | 23 +-
.../webapp/{ContainerPage.java => AMSWebApp.java} | 29 +-
.../webapp/AppAttemptPage.java | 69 -
.../applicationhistoryservice/webapp/AppPage.java | 71 -
.../applicationhistoryservice/webapp/NavBlock.java | 51 -
.../webapp/TimelineWebServices.java | 250 +---
.../ApplicationHistoryStoreTestUtils.java | 84 --
.../TestApplicationHistoryClientService.java | 209 ---
.../TestApplicationHistoryManagerImpl.java | 76 -
.../TestApplicationHistoryServer.java | 267 ----
.../TestFileSystemApplicationHistoryStore.java | 233 ----
.../TestMemoryApplicationHistoryStore.java | 206 ---
.../timeline/AbstractMiniHBaseClusterTest.java | 6 +-
.../metrics/timeline/ITPhoenixHBaseAccessor.java | 47 +-
.../timeline/TestLeveldbTimelineStore.java | 253 ----
.../timeline/TestMemoryTimelineStore.java | 83 --
.../timeline/TimelineStoreTestUtils.java | 789 -----------
.../webapp/TestAHSWebApp.java | 199 ---
.../webapp/TestAHSWebServices.java | 302 ----
.../webapp/TestTimelineWebServices.java | 297 +---
ambari-metrics/pom.xml | 20 +-
45 files changed, 241 insertions(+), 8019 deletions(-)
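Taken together, the diffstat shows this commit swapping YARN's general-purpose ApplicationHistoryServer for a purpose-built AMSApplicationServer and deleting the application-history and leveldb/memory timeline-store code paths, roughly 8,000 lines in all. A minimal sketch of the resulting service shape follows; it is an illustration assembled from the diff below, not the literal post-commit source:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.HBaseTimelineMetricsService;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;

// Sketch of the post-commit composition: only the metrics store remains a
// child service; the history manager, client RPC service and timeline store
// wired in by the old ApplicationHistoryServer are gone.
public class CollectorServiceSketch extends CompositeService {
  private TimelineMetricStore timelineMetricStore;

  public CollectorServiceSketch() {
    super(CollectorServiceSketch.class.getName());
  }

  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    TimelineMetricConfiguration metricConfiguration = TimelineMetricConfiguration.getInstance();
    metricConfiguration.initialize();
    timelineMetricStore = new HBaseTimelineMetricsService(metricConfiguration);
    addIfService(timelineMetricStore);
    super.serviceInit(conf);
  }
}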
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
index 552be48..de764ec 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
@@ -25,7 +25,7 @@ HBASE_RS_PID=/var/run/ams-hbase/hbase-${USER}-regionserver.pid
HBASE_DIR=/usr/lib/ams-hbase
-DAEMON_NAME=timelineserver
+DAEMON_NAME=ams-metrics-collector
COLLECTOR_CONF_DIR=/etc/ambari-metrics-collector/conf
HBASE_CONF_DIR=/etc/ams-hbase/conf
@@ -238,7 +238,7 @@ function start()
echo "$(date) Launching in distributed mode. Assuming Hbase daemons up and running." | tee -a $STARTUPFILE
fi
- CLASS='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
+ CLASS='org.apache.hadoop.yarn.server.applicationhistoryservice.AMSApplicationServer'
# YARN_OPTS="${YARN_OPTS} ${YARN_TIMELINESERVER_OPTS}"
# if [[ -n "${YARN_TIMELINESERVER_HEAPSIZE}" ]]; then
# JAVA_HEAP_MAX="-Xmx${YARN_TIMELINESERVER_HEAPSIZE}m"
@@ -263,7 +263,7 @@ function start()
sleep 2
echo "Verifying ${METRIC_COLLECTOR} process status..." | tee -a $STARTUPFILE
- if [ -z "`ps ax | grep -w ${PID} | grep ApplicationHistoryServer`" ]; then
+ if [ -z "`ps ax | grep -w ${PID} | grep AMSApplicationServer`" ]; then
if [ -s ${OUTFILE} ]; then
echo "ERROR: ${METRIC_COLLECTOR} start failed. For more details, see ${OUTFILE}:" | tee -a $STARTUPFILE
echo "===================="
diff --git a/ambari-metrics/ambari-metrics-timelineservice/pom.xml b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
index 7794a11..e6a7e64 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/pom.xml
+++ b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
@@ -34,9 +34,9 @@
<!-- Needed for generating FindBugs warnings using parent pom -->
<!--<yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>-->
<protobuf.version>2.5.0</protobuf.version>
- <hadoop.version>2.7.3.2.6.4.0-91</hadoop.version>
- <phoenix.version>4.7.0.2.6.4.0-91</phoenix.version>
- <hbase.version>1.1.2.2.6.4.0-91</hbase.version>
+ <hadoop.version>3.0.0.3.0.0.0-623</hadoop.version>
+ <phoenix.version>5.0.0.3.0.0.0-623</phoenix.version>
+ <hbase.version>2.0.0.3.0.0.0-623</hbase.version>
</properties>
<build>
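The property bumps above move the module from the HDP 2.6 builds of Hadoop 2.7.3, Phoenix 4.7.0 and HBase 1.1.2 to the 3.0-line builds of Hadoop 3.0.0, Phoenix 5.0.0 and HBase 2.0.0. A small hypothetical sanity check, not part of this commit, that logs which Hadoop and HBase client libraries actually landed on the collector's classpath after the upgrade:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Hypothetical helper (not in this commit): surface the resolved client
// versions in the collector log so a stale jar after the upgrade is obvious.
public final class ClientVersionCheck {
  private static final Log LOG = LogFactory.getLog(ClientVersionCheck.class);

  public static void logClientVersions() {
    LOG.info("Hadoop client version: " + org.apache.hadoop.util.VersionInfo.getVersion());
    LOG.info("HBase client version: " + org.apache.hadoop.hbase.util.VersionInfo.getVersion());
  }

  private ClientVersionCheck() {
  }
}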
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/AMSApplicationServer.java
similarity index 54%
rename from ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
rename to ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/AMSApplicationServer.java
index 331670d..f576362 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/AMSApplicationServer.java
@@ -26,9 +26,7 @@ import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.service.CompositeService;
-import org.apache.hadoop.service.Service;
import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.ShutdownHookManager;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
@@ -37,57 +35,42 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.HBaseTimelineMetricsService;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AMSWebApp;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebApps;
import com.google.common.annotations.VisibleForTesting;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_APPLICATION_TIMELINE_STORE;
-
/**
- * History server that keeps track of all types of history in the cluster.
- * Application specific history to start with.
+ * Metrics collector web server
*/
-public class ApplicationHistoryServer extends CompositeService {
+public class AMSApplicationServer extends CompositeService {
public static final int SHUTDOWN_HOOK_PRIORITY = 30;
- private static final Log LOG =
- LogFactory.getLog(ApplicationHistoryServer.class);
+ private static final Log LOG = LogFactory.getLog(AMSApplicationServer.class);
- ApplicationHistoryClientService ahsClientService;
- ApplicationHistoryManager historyManager;
- TimelineStore timelineStore;
TimelineMetricStore timelineMetricStore;
private WebApp webApp;
private TimelineMetricConfiguration metricConfiguration;
- public ApplicationHistoryServer() {
- super(ApplicationHistoryServer.class.getName());
+ public AMSApplicationServer() {
+ super(AMSApplicationServer.class.getName());
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
metricConfiguration = TimelineMetricConfiguration.getInstance();
metricConfiguration.initialize();
- historyManager = createApplicationHistory();
- ahsClientService = createApplicationHistoryClientService(historyManager);
- addService(ahsClientService);
- addService((Service) historyManager);
- timelineStore = createTimelineStore(conf);
timelineMetricStore = createTimelineMetricStore(conf);
- addIfService(timelineStore);
addIfService(timelineMetricStore);
super.serviceInit(conf);
}
@Override
protected void serviceStart() throws Exception {
- DefaultMetricsSystem.initialize("ApplicationHistoryServer");
- JvmMetrics.initSingleton("ApplicationHistoryServer", null);
+ DefaultMetricsSystem.initialize("AmbariMetricsSystem");
+ JvmMetrics.initSingleton("AmbariMetricsSystem", null);
startWebApp();
super.serviceStart();
@@ -102,66 +85,30 @@ public class ApplicationHistoryServer extends CompositeService {
DefaultMetricsSystem.shutdown();
super.serviceStop();
}
-
- @Private
- @VisibleForTesting
- public ApplicationHistoryClientService getClientService() {
- return this.ahsClientService;
- }
-
- protected ApplicationHistoryClientService createApplicationHistoryClientService(
- ApplicationHistoryManager historyManager) {
- return new ApplicationHistoryClientService(historyManager, metricConfiguration);
- }
-
- protected ApplicationHistoryManager createApplicationHistory() {
- return new ApplicationHistoryManagerImpl();
- }
-
- protected ApplicationHistoryManager getApplicationHistory() {
- return this.historyManager;
- }
-
- static ApplicationHistoryServer launchAppHistoryServer(String[] args) {
- Thread
- .setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
- StringUtils.startupShutdownMessage(ApplicationHistoryServer.class, args,
- LOG);
- ApplicationHistoryServer appHistoryServer = null;
+
+ static AMSApplicationServer launchAppHistoryServer(String[] args) {
+ Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
+ StringUtils.startupShutdownMessage(AMSApplicationServer.class, args, LOG);
+ AMSApplicationServer amsApplicationServer = null;
try {
- appHistoryServer = new ApplicationHistoryServer();
+ amsApplicationServer = new AMSApplicationServer();
ShutdownHookManager.get().addShutdownHook(
- new CompositeServiceShutdownHook(appHistoryServer),
+ new CompositeServiceShutdownHook(amsApplicationServer),
SHUTDOWN_HOOK_PRIORITY);
YarnConfiguration conf = new YarnConfiguration();
- appHistoryServer.init(conf);
- appHistoryServer.start();
+ amsApplicationServer.init(conf);
+ amsApplicationServer.start();
} catch (Throwable t) {
- LOG.fatal("Error starting ApplicationHistoryServer", t);
- ExitUtil.terminate(-1, "Error starting ApplicationHistoryServer");
+ LOG.fatal("Error starting AMSApplicationServer", t);
+ ExitUtil.terminate(-1, "Error starting AMSApplicationServer");
}
- return appHistoryServer;
+ return amsApplicationServer;
}
public static void main(String[] args) {
launchAppHistoryServer(args);
}
- protected ApplicationHistoryManager createApplicationHistoryManager(
- Configuration conf) {
- return new ApplicationHistoryManagerImpl();
- }
-
- protected TimelineStore createTimelineStore(Configuration conf) {
- if (conf.getBoolean(DISABLE_APPLICATION_TIMELINE_STORE, true)) {
- LOG.info("Explicitly disabled application timeline store.");
- return new MemoryTimelineStore();
- }
- return ReflectionUtils.newInstance(conf.getClass(
- YarnConfiguration.TIMELINE_SERVICE_STORE, LeveldbTimelineStore.class,
- TimelineStore.class), conf);
- }
-
protected TimelineMetricStore createTimelineMetricStore(Configuration conf) {
LOG.info("Creating metrics store.");
return new HBaseTimelineMetricsService(metricConfiguration);
@@ -174,7 +121,7 @@ public class ApplicationHistoryServer extends CompositeService {
} catch (Exception e) {
throw new ExceptionInInitializerError("Cannot find bind address");
}
- LOG.info("Instantiating AHSWebApp at " + bindAddress);
+ LOG.info("Instantiating metrics collector at " + bindAddress);
try {
Configuration conf = metricConfiguration.getMetricsConf();
conf.set("hadoop.http.max.threads", String.valueOf(metricConfiguration
@@ -184,25 +131,15 @@ public class ApplicationHistoryServer extends CompositeService {
HttpConfig.Policy.HTTP_ONLY.name()));
webApp =
WebApps
- .$for("applicationhistory", ApplicationHistoryClientService.class,
- ahsClientService, "ws")
+ .$for("ambarimetrics", null, null, "ws")
.withHttpPolicy(conf, policy)
.at(bindAddress)
- .start(new AHSWebApp(timelineStore, timelineMetricStore,
- ahsClientService));
+ .start(new AMSWebApp(timelineMetricStore));
} catch (Exception e) {
String msg = "AHSWebApp failed to start.";
LOG.error(msg, e);
throw new YarnRuntimeException(msg, e);
}
}
-
- /**
- * @return ApplicationTimelineStore
- */
- @Private
- @VisibleForTesting
- public TimelineStore getTimelineStore() {
- return timelineStore;
- }
+
}
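With the history services stripped out, AMSApplicationServer keeps the standard Hadoop service lifecycle, and launchAppHistoryServer above remains the entry point (the method keeps its old name in this commit). A hedged usage sketch driving the server through init/start/stop directly, e.g. from a test harness; the YarnConfiguration here is assumed to resolve a valid collector configuration:

import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.AMSApplicationServer;

// Usage sketch: exercise the renamed server through the Service lifecycle.
public class AmsServerLifecycleSketch {
  public static void main(String[] args) throws Exception {
    AMSApplicationServer server = new AMSApplicationServer();
    try {
      server.init(new YarnConfiguration()); // serviceInit: metrics config + metric store
      server.start();                       // serviceStart: metrics system + web app
    } finally {
      server.stop();                        // serviceStop: DefaultMetricsSystem.shutdown()
    }
  }
}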
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
deleted file mode 100644
index 08beb5d..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.util.ArrayList;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
-import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ContainerReport;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException;
-import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
-import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
-
-public class ApplicationHistoryClientService extends AbstractService implements
- ApplicationHistoryProtocol {
- private static final Log LOG = LogFactory
- .getLog(ApplicationHistoryClientService.class);
- private ApplicationHistoryManager history;
- private Server server;
- private InetSocketAddress bindAddress;
- private TimelineMetricConfiguration metricConfiguration;
-
- public ApplicationHistoryClientService(ApplicationHistoryManager history) {
- super("ApplicationHistoryClientService");
- this.history = history;
- }
-
- public ApplicationHistoryClientService(ApplicationHistoryManager history,
- TimelineMetricConfiguration metricConfiguration) {
- this(history);
- this.metricConfiguration = metricConfiguration;
- }
-
- protected void serviceStart() throws Exception {
- Configuration conf = getConfig();
- YarnRPC rpc = YarnRPC.create(conf);
- InetSocketAddress address =
- NetUtils.createSocketAddr(metricConfiguration.getTimelineServiceRpcAddress(),
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_PORT);
-
- server =
- rpc.getServer(ApplicationHistoryProtocol.class, this,
- address, conf, null, metricConfiguration.getTimelineMetricsServiceHandlerThreadCount());
-
- server.start();
- this.bindAddress =
- conf.updateConnectAddr(YarnConfiguration.TIMELINE_SERVICE_ADDRESS,
- server.getListenerAddress());
- LOG.info("Instantiated ApplicationHistoryClientService at "
- + this.bindAddress);
-
- super.serviceStart();
- }
-
- @Override
- protected void serviceStop() throws Exception {
- if (server != null) {
- server.stop();
- }
- super.serviceStop();
- }
-
- @Private
- public ApplicationHistoryProtocol getClientHandler() {
- return this;
- }
-
- @Private
- public InetSocketAddress getBindAddress() {
- return this.bindAddress;
- }
-
-
-
- @Override
- public CancelDelegationTokenResponse cancelDelegationToken(
- CancelDelegationTokenRequest request) throws YarnException, IOException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public GetApplicationAttemptReportResponse getApplicationAttemptReport(
- GetApplicationAttemptReportRequest request) throws YarnException,
- IOException {
- try {
- GetApplicationAttemptReportResponse response =
- GetApplicationAttemptReportResponse.newInstance(history
- .getApplicationAttempt(request.getApplicationAttemptId()));
- return response;
- } catch (IOException e) {
- throw new ApplicationAttemptNotFoundException(e.getMessage());
- }
- }
-
- @Override
- public GetApplicationAttemptsResponse getApplicationAttempts(
- GetApplicationAttemptsRequest request) throws YarnException,
- IOException {
- GetApplicationAttemptsResponse response =
- GetApplicationAttemptsResponse
- .newInstance(new ArrayList<ApplicationAttemptReport>(history
- .getApplicationAttempts(request.getApplicationId()).values()));
- return response;
- }
-
- @Override
- public GetApplicationReportResponse getApplicationReport(
- GetApplicationReportRequest request) throws YarnException, IOException {
- try {
- ApplicationId applicationId = request.getApplicationId();
- GetApplicationReportResponse response =
- GetApplicationReportResponse.newInstance(history
- .getApplication(applicationId));
- return response;
- } catch (IOException e) {
- throw new ApplicationNotFoundException(e.getMessage());
- }
- }
-
- @Override
- public GetApplicationsResponse getApplications(
- GetApplicationsRequest request) throws YarnException, IOException {
- GetApplicationsResponse response =
- GetApplicationsResponse.newInstance(new ArrayList<ApplicationReport>(
- history.getApplications(request.getLimit()).values()));
- return response;
- }
-
- @Override
- public GetContainerReportResponse getContainerReport(
- GetContainerReportRequest request) throws YarnException, IOException {
- try {
- GetContainerReportResponse response =
- GetContainerReportResponse.newInstance(history.getContainer(request
- .getContainerId()));
- return response;
- } catch (IOException e) {
- throw new ContainerNotFoundException(e.getMessage());
- }
- }
-
- @Override
- public GetContainersResponse getContainers(GetContainersRequest request)
- throws YarnException, IOException {
- GetContainersResponse response =
- GetContainersResponse.newInstance(new ArrayList<ContainerReport>(
- history.getContainers(request.getApplicationAttemptId()).values()));
- return response;
- }
-
- @Override
- public GetDelegationTokenResponse getDelegationToken(
- GetDelegationTokenRequest request) throws YarnException, IOException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public RenewDelegationTokenResponse renewDelegationToken(
- RenewDelegationTokenRequest request) throws YarnException, IOException {
- // TODO Auto-generated method stub
- return null;
- }
-
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManager.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManager.java
deleted file mode 100644
index 5ddb3af..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManager.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerReport;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-
-@Private
-@Unstable
-public interface ApplicationHistoryManager {
- /**
- * This method returns Application {@link ApplicationReport} for the specified
- * {@link ApplicationId}.
- *
- * @param appId
- *
- * @return {@link ApplicationReport} for the ApplicationId.
- * @throws YarnException
- * @throws IOException
- */
- @Public
- @Unstable
- ApplicationReport getApplication(ApplicationId appId) throws YarnException,
- IOException;
-
- /**
- * This method returns the given number of Application
- * {@link ApplicationReport}s.
- *
- * @param appsNum
- *
- * @return map of {@link ApplicationId} to {@link ApplicationReport}s.
- * @throws YarnException
- * @throws IOException
- */
- @Public
- @Unstable
- Map<ApplicationId, ApplicationReport>
- getApplications(long appsNum) throws YarnException,
- IOException;
-
- /**
- * Application can have multiple application attempts
- * {@link ApplicationAttemptReport}. This method returns the all
- * {@link ApplicationAttemptReport}s for the Application.
- *
- * @param appId
- *
- * @return all {@link ApplicationAttemptReport}s for the Application.
- * @throws YarnException
- * @throws IOException
- */
- @Public
- @Unstable
- Map<ApplicationAttemptId, ApplicationAttemptReport> getApplicationAttempts(
- ApplicationId appId) throws YarnException, IOException;
-
- /**
- * This method returns {@link ApplicationAttemptReport} for specified
- * {@link ApplicationId}.
- *
- * @param appAttemptId
- * {@link ApplicationAttemptId}
- * @return {@link ApplicationAttemptReport} for ApplicationAttemptId
- * @throws YarnException
- * @throws IOException
- */
- @Public
- @Unstable
- ApplicationAttemptReport getApplicationAttempt(
- ApplicationAttemptId appAttemptId) throws YarnException, IOException;
-
- /**
- * This method returns {@link ContainerReport} for specified
- * {@link ContainerId}.
- *
- * @param containerId
- * {@link ContainerId}
- * @return {@link ContainerReport} for ContainerId
- * @throws YarnException
- * @throws IOException
- */
- @Public
- @Unstable
- ContainerReport getContainer(ContainerId containerId) throws YarnException,
- IOException;
-
- /**
- * This method returns {@link ContainerReport} for specified
- * {@link ApplicationAttemptId}.
- *
- * @param appAttemptId
- * {@link ApplicationAttemptId}
- * @return {@link ContainerReport} for ApplicationAttemptId
- * @throws YarnException
- * @throws IOException
- */
- @Public
- @Unstable
- ContainerReport getAMContainer(ApplicationAttemptId appAttemptId)
- throws YarnException, IOException;
-
- /**
- * This method returns Map of {@link ContainerId} to {@link ContainerReport}
- * for specified {@link ApplicationAttemptId}.
- *
- * @param appAttemptId
- * {@link ApplicationAttemptId}
- * @return Map of {@link ContainerId} to {@link ContainerReport} for
- * ApplicationAttemptId
- * @throws YarnException
- * @throws IOException
- */
- @Public
- @Unstable
- Map<ContainerId, ContainerReport> getContainers(
- ApplicationAttemptId appAttemptId) throws YarnException, IOException;
-
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
deleted file mode 100644
index d699264..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
+++ /dev/null
@@ -1,250 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerReport;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
-
-import com.google.common.annotations.VisibleForTesting;
-
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_APPLICATION_TIMELINE_STORE;
-
-public class ApplicationHistoryManagerImpl extends AbstractService implements
- ApplicationHistoryManager {
- private static final Log LOG = LogFactory
- .getLog(ApplicationHistoryManagerImpl.class);
- private static final String UNAVAILABLE = "N/A";
-
- private ApplicationHistoryStore historyStore;
- private String serverHttpAddress;
-
- public ApplicationHistoryManagerImpl() {
- super(ApplicationHistoryManagerImpl.class.getName());
- }
-
- @Override
- protected void serviceInit(Configuration conf) throws Exception {
- LOG.info("ApplicationHistory Init");
- historyStore = createApplicationHistoryStore(conf);
- historyStore.init(conf);
- serverHttpAddress = WebAppUtils.getHttpSchemePrefix(conf) +
- WebAppUtils.getAHSWebAppURLWithoutScheme(conf);
- super.serviceInit(conf);
- }
-
- @Override
- protected void serviceStart() throws Exception {
- LOG.info("Starting ApplicationHistory");
- historyStore.start();
- super.serviceStart();
- }
-
- @Override
- protected void serviceStop() throws Exception {
- LOG.info("Stopping ApplicationHistory");
- historyStore.stop();
- super.serviceStop();
- }
-
- protected ApplicationHistoryStore createApplicationHistoryStore(
- Configuration conf) {
- if (conf.getBoolean(DISABLE_APPLICATION_TIMELINE_STORE, true)) {
- LOG.info("Explicitly disabled application timeline store.");
- return new NullApplicationHistoryStore();
- }
- return ReflectionUtils.newInstance(conf.getClass(
- YarnConfiguration.APPLICATION_HISTORY_STORE,
- NullApplicationHistoryStore.class,
- ApplicationHistoryStore.class), conf);
- }
-
- @Override
- public ContainerReport getAMContainer(ApplicationAttemptId appAttemptId)
- throws IOException {
- ApplicationReport app =
- getApplication(appAttemptId.getApplicationId());
- return convertToContainerReport(historyStore.getAMContainer(appAttemptId),
- app == null ? null : app.getUser());
- }
-
- @Override
- public Map<ApplicationId, ApplicationReport> getApplications(long appsNum)
- throws IOException {
- Map<ApplicationId, ApplicationHistoryData> histData =
- historyStore.getAllApplications();
- HashMap<ApplicationId, ApplicationReport> applicationsReport =
- new HashMap<ApplicationId, ApplicationReport>();
- for (Entry<ApplicationId, ApplicationHistoryData> entry : histData
- .entrySet()) {
- applicationsReport.put(entry.getKey(),
- convertToApplicationReport(entry.getValue()));
- }
- return applicationsReport;
- }
-
- @Override
- public ApplicationReport getApplication(ApplicationId appId)
- throws IOException {
- return convertToApplicationReport(historyStore.getApplication(appId));
- }
-
- private ApplicationReport convertToApplicationReport(
- ApplicationHistoryData appHistory) throws IOException {
- ApplicationAttemptId currentApplicationAttemptId = null;
- String trackingUrl = UNAVAILABLE;
- String host = UNAVAILABLE;
- int rpcPort = -1;
-
- ApplicationAttemptHistoryData lastAttempt =
- getLastAttempt(appHistory.getApplicationId());
- if (lastAttempt != null) {
- currentApplicationAttemptId = lastAttempt.getApplicationAttemptId();
- trackingUrl = lastAttempt.getTrackingURL();
- host = lastAttempt.getHost();
- rpcPort = lastAttempt.getRPCPort();
- }
- return ApplicationReport.newInstance(appHistory.getApplicationId(),
- currentApplicationAttemptId, appHistory.getUser(), appHistory.getQueue(),
- appHistory.getApplicationName(), host, rpcPort, null,
- appHistory.getYarnApplicationState(), appHistory.getDiagnosticsInfo(),
- trackingUrl, appHistory.getStartTime(), appHistory.getFinishTime(),
- appHistory.getFinalApplicationStatus(), null, "", 100,
- appHistory.getApplicationType(), null);
- }
-
- private ApplicationAttemptHistoryData getLastAttempt(ApplicationId appId)
- throws IOException {
- Map<ApplicationAttemptId, ApplicationAttemptHistoryData> attempts =
- historyStore.getApplicationAttempts(appId);
- ApplicationAttemptId prevMaxAttemptId = null;
- for (ApplicationAttemptId attemptId : attempts.keySet()) {
- if (prevMaxAttemptId == null) {
- prevMaxAttemptId = attemptId;
- } else {
- if (prevMaxAttemptId.getAttemptId() < attemptId.getAttemptId()) {
- prevMaxAttemptId = attemptId;
- }
- }
- }
- return attempts.get(prevMaxAttemptId);
- }
-
- private ApplicationAttemptReport convertToApplicationAttemptReport(
- ApplicationAttemptHistoryData appAttemptHistory) {
- return ApplicationAttemptReport.newInstance(
- appAttemptHistory.getApplicationAttemptId(), appAttemptHistory.getHost(),
- appAttemptHistory.getRPCPort(), appAttemptHistory.getTrackingURL(),
- null,
- appAttemptHistory.getDiagnosticsInfo(),
- appAttemptHistory.getYarnApplicationAttemptState(),
- appAttemptHistory.getMasterContainerId());
- }
-
- @Override
- public ApplicationAttemptReport getApplicationAttempt(
- ApplicationAttemptId appAttemptId) throws IOException {
- return convertToApplicationAttemptReport(historyStore
- .getApplicationAttempt(appAttemptId));
- }
-
- @Override
- public Map<ApplicationAttemptId, ApplicationAttemptReport>
- getApplicationAttempts(ApplicationId appId) throws IOException {
- Map<ApplicationAttemptId, ApplicationAttemptHistoryData> histData =
- historyStore.getApplicationAttempts(appId);
- HashMap<ApplicationAttemptId, ApplicationAttemptReport> applicationAttemptsReport =
- new HashMap<ApplicationAttemptId, ApplicationAttemptReport>();
- for (Entry<ApplicationAttemptId, ApplicationAttemptHistoryData> entry : histData
- .entrySet()) {
- applicationAttemptsReport.put(entry.getKey(),
- convertToApplicationAttemptReport(entry.getValue()));
- }
- return applicationAttemptsReport;
- }
-
- @Override
- public ContainerReport getContainer(ContainerId containerId)
- throws IOException {
- ApplicationReport app =
- getApplication(containerId.getApplicationAttemptId().getApplicationId());
- return convertToContainerReport(historyStore.getContainer(containerId),
- app == null ? null: app.getUser());
- }
-
- private ContainerReport convertToContainerReport(
- ContainerHistoryData containerHistory, String user) {
- // If the container has the aggregated log, add the server root url
- String logUrl = WebAppUtils.getAggregatedLogURL(
- serverHttpAddress,
- containerHistory.getAssignedNode().toString(),
- containerHistory.getContainerId().toString(),
- containerHistory.getContainerId().toString(),
- user);
- return ContainerReport.newInstance(containerHistory.getContainerId(),
- containerHistory.getAllocatedResource(),
- containerHistory.getAssignedNode(), containerHistory.getPriority(),
- containerHistory.getStartTime(), containerHistory.getFinishTime(),
- containerHistory.getDiagnosticsInfo(), logUrl,
- containerHistory.getContainerExitStatus(),
- containerHistory.getContainerState(), serverHttpAddress);
- }
-
- @Override
- public Map<ContainerId, ContainerReport> getContainers(
- ApplicationAttemptId appAttemptId) throws IOException {
- ApplicationReport app =
- getApplication(appAttemptId.getApplicationId());
- Map<ContainerId, ContainerHistoryData> histData =
- historyStore.getContainers(appAttemptId);
- HashMap<ContainerId, ContainerReport> containersReport =
- new HashMap<ContainerId, ContainerReport>();
- for (Entry<ContainerId, ContainerHistoryData> entry : histData.entrySet()) {
- containersReport.put(entry.getKey(),
- convertToContainerReport(entry.getValue(),
- app == null ? null : app.getUser()));
- }
- return containersReport;
- }
-
- @Private
- @VisibleForTesting
- public ApplicationHistoryStore getHistoryStore() {
- return this.historyStore;
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java
deleted file mode 100644
index 590853a..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryReader.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public interface ApplicationHistoryReader {
-
- /**
- * This method returns Application {@link ApplicationHistoryData} for the
- * specified {@link ApplicationId}.
- *
- * @param appId
- *
- * @return {@link ApplicationHistoryData} for the ApplicationId.
- * @throws IOException
- */
- ApplicationHistoryData getApplication(ApplicationId appId) throws IOException;
-
- /**
- * This method returns all Application {@link ApplicationHistoryData}s
- *
- * @return map of {@link ApplicationId} to {@link ApplicationHistoryData}s.
- * @throws IOException
- */
- Map<ApplicationId, ApplicationHistoryData> getAllApplications()
- throws IOException;
-
- /**
- * Application can have multiple application attempts
- * {@link ApplicationAttemptHistoryData}. This method returns the all
- * {@link ApplicationAttemptHistoryData}s for the Application.
- *
- * @param appId
- *
- * @return all {@link ApplicationAttemptHistoryData}s for the Application.
- * @throws IOException
- */
- Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
- getApplicationAttempts(ApplicationId appId) throws IOException;
-
- /**
- * This method returns {@link ApplicationAttemptHistoryData} for specified
- * {@link ApplicationId}.
- *
- * @param appAttemptId
- * {@link ApplicationAttemptId}
- * @return {@link ApplicationAttemptHistoryData} for ApplicationAttemptId
- * @throws IOException
- */
- ApplicationAttemptHistoryData getApplicationAttempt(
- ApplicationAttemptId appAttemptId) throws IOException;
-
- /**
- * This method returns {@link ContainerHistoryData} for specified
- * {@link ContainerId}.
- *
- * @param containerId
- * {@link ContainerId}
- * @return {@link ContainerHistoryData} for ContainerId
- * @throws IOException
- */
- ContainerHistoryData getContainer(ContainerId containerId) throws IOException;
-
- /**
- * This method returns {@link ContainerHistoryData} for specified
- * {@link ApplicationAttemptId}.
- *
- * @param appAttemptId
- * {@link ApplicationAttemptId}
- * @return {@link ContainerHistoryData} for ApplicationAttemptId
- * @throws IOException
- */
- ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId)
- throws IOException;
-
- /**
- * This method returns Map{@link ContainerId} to {@link ContainerHistoryData}
- * for specified {@link ApplicationAttemptId}.
- *
- * @param appAttemptId
- * {@link ApplicationAttemptId}
- * @return Map{@link ContainerId} to {@link ContainerHistoryData} for
- * ApplicationAttemptId
- * @throws IOException
- */
- Map<ContainerId, ContainerHistoryData> getContainers(
- ApplicationAttemptId appAttemptId) throws IOException;
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java
deleted file mode 100644
index c26faef..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStore.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.service.Service;
-
-/**
- * This class is the abstract of the storage of the application history data. It
- * is a {@link Service}, such that the implementation of this class can make use
- * of the service life cycle to initialize and cleanup the storage. Users can
- * access the storage via {@link ApplicationHistoryReader} and
- * {@link ApplicationHistoryWriter} interfaces.
- *
- */
-@InterfaceAudience.Public
-@InterfaceStability.Unstable
-public interface ApplicationHistoryStore extends Service,
- ApplicationHistoryReader, ApplicationHistoryWriter {
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java
deleted file mode 100644
index 09ba36d..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryWriter.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
-
-/**
- * It is the interface of writing the application history, exposing the methods
- * of writing {@link ApplicationStartData}, {@link ApplicationFinishData}
- * {@link ApplicationAttemptStartData}, {@link ApplicationAttemptFinishData},
- * {@link ContainerStartData} and {@link ContainerFinishData}.
- */
-@Private
-@Unstable
-public interface ApplicationHistoryWriter {
-
- /**
- * This method writes the information of <code>RMApp</code> that is available
- * when it starts.
- *
- * @param appStart
- * the record of the information of <code>RMApp</code> that is
- * available when it starts
- * @throws IOException
- */
- void applicationStarted(ApplicationStartData appStart) throws IOException;
-
- /**
- * This method writes the information of <code>RMApp</code> that is available
- * when it finishes.
- *
- * @param appFinish
- * the record of the information of <code>RMApp</code> that is
- * available when it finishes
- * @throws IOException
- */
- void applicationFinished(ApplicationFinishData appFinish) throws IOException;
-
- /**
- * This method writes the information of <code>RMAppAttempt</code> that is
- * available when it starts.
- *
- * @param appAttemptStart
- * the record of the information of <code>RMAppAttempt</code> that is
- * available when it starts
- * @throws IOException
- */
- void applicationAttemptStarted(ApplicationAttemptStartData appAttemptStart)
- throws IOException;
-
- /**
- * This method writes the information of <code>RMAppAttempt</code> that is
- * available when it finishes.
- *
- * @param appAttemptFinish
- * the record of the information of <code>RMAppAttempt</code> that is
- * available when it finishes
- * @throws IOException
- */
- void
- applicationAttemptFinished(ApplicationAttemptFinishData appAttemptFinish)
- throws IOException;
-
- /**
- * This method writes the information of <code>RMContainer</code> that is
- * available when it starts.
- *
- * @param containerStart
- * the record of the information of <code>RMContainer</code> that is
- * available when it starts
- * @throws IOException
- */
- void containerStarted(ContainerStartData containerStart) throws IOException;
-
- /**
- * This method writes the information of <code>RMContainer</code> that is
- * available when it finishes.
- *
- * @param containerFinish
- * the record of the information of <code>RMContainer</code> that is
- * available when it finishes
- * @throws IOException
- */
- void containerFinished(ContainerFinishData containerFinish)
- throws IOException;
-
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
deleted file mode 100644
index 4c8d745..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
+++ /dev/null
@@ -1,784 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.file.tfile.TFile;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto;
-import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto;
-import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto;
-import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto;
-import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto;
-import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptFinishDataPBImpl;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptStartDataPBImpl;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationFinishDataPBImpl;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationStartDataPBImpl;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerFinishDataPBImpl;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerStartDataPBImpl;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-
-import com.google.protobuf.InvalidProtocolBufferException;
-
-/**
- * File system implementation of {@link ApplicationHistoryStore}. In this
- * implementation, one application will have just one file in the file system,
- * which contains all the history data of one application, and its attempts and
- * containers. {@link #applicationStarted(ApplicationStartData)} is supposed to
- * be invoked first when writing any history data of one application and it will
- * open a file, while {@link #applicationFinished(ApplicationFinishData)} is
- * supposed to be last writing operation and will close the file.
- */
-@Public
-@Unstable
-public class FileSystemApplicationHistoryStore extends AbstractService
- implements ApplicationHistoryStore {
-
- private static final Log LOG = LogFactory
- .getLog(FileSystemApplicationHistoryStore.class);
-
- private static final String ROOT_DIR_NAME = "ApplicationHistoryDataRoot";
- private static final int MIN_BLOCK_SIZE = 256 * 1024;
- private static final String START_DATA_SUFFIX = "_start";
- private static final String FINISH_DATA_SUFFIX = "_finish";
- private static final FsPermission ROOT_DIR_UMASK = FsPermission
- .createImmutable((short) 0740);
- private static final FsPermission HISTORY_FILE_UMASK = FsPermission
- .createImmutable((short) 0640);
-
- private FileSystem fs;
- private Path rootDirPath;
-
- private ConcurrentMap<ApplicationId, HistoryFileWriter> outstandingWriters =
- new ConcurrentHashMap<ApplicationId, HistoryFileWriter>();
-
- public FileSystemApplicationHistoryStore() {
- super(FileSystemApplicationHistoryStore.class.getName());
- }
-
- @Override
- public void serviceInit(Configuration conf) throws Exception {
- Path fsWorkingPath =
- new Path(conf.get(YarnConfiguration.FS_APPLICATION_HISTORY_STORE_URI));
- rootDirPath = new Path(fsWorkingPath, ROOT_DIR_NAME);
- try {
- fs = fsWorkingPath.getFileSystem(conf);
- fs.mkdirs(rootDirPath);
- fs.setPermission(rootDirPath, ROOT_DIR_UMASK);
- } catch (IOException e) {
- LOG.error("Error when initializing FileSystemHistoryStorage", e);
- throw e;
- }
- super.serviceInit(conf);
- }
-
- @Override
- public void serviceStop() throws Exception {
- try {
- for (Entry<ApplicationId, HistoryFileWriter> entry : outstandingWriters
- .entrySet()) {
- entry.getValue().close();
- }
- outstandingWriters.clear();
- } finally {
- IOUtils.cleanup(LOG, fs);
- }
- super.serviceStop();
- }
-
- @Override
- public ApplicationHistoryData getApplication(ApplicationId appId)
- throws IOException {
- HistoryFileReader hfReader = getHistoryFileReader(appId);
- try {
- boolean readStartData = false;
- boolean readFinishData = false;
- ApplicationHistoryData historyData =
- ApplicationHistoryData.newInstance(appId, null, null, null, null,
- Long.MIN_VALUE, Long.MIN_VALUE, Long.MAX_VALUE, null,
- FinalApplicationStatus.UNDEFINED, null);
- while ((!readStartData || !readFinishData) && hfReader.hasNext()) {
- HistoryFileReader.Entry entry = hfReader.next();
- if (entry.key.id.equals(appId.toString())) {
- if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
- ApplicationStartData startData =
- parseApplicationStartData(entry.value);
- mergeApplicationHistoryData(historyData, startData);
- readStartData = true;
- } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
- ApplicationFinishData finishData =
- parseApplicationFinishData(entry.value);
- mergeApplicationHistoryData(historyData, finishData);
- readFinishData = true;
- }
- }
- }
- if (!readStartData && !readFinishData) {
- return null;
- }
- if (!readStartData) {
- LOG.warn("Start information is missing for application " + appId);
- }
- if (!readFinishData) {
- LOG.warn("Finish information is missing for application " + appId);
- }
- LOG.info("Completed reading history information of application " + appId);
- return historyData;
- } catch (IOException e) {
- LOG.error("Error when reading history file of application " + appId);
- throw e;
- } finally {
- hfReader.close();
- }
- }
-
- @Override
- public Map<ApplicationId, ApplicationHistoryData> getAllApplications()
- throws IOException {
- Map<ApplicationId, ApplicationHistoryData> historyDataMap =
- new HashMap<ApplicationId, ApplicationHistoryData>();
- FileStatus[] files = fs.listStatus(rootDirPath);
- for (FileStatus file : files) {
- ApplicationId appId =
- ConverterUtils.toApplicationId(file.getPath().getName());
- try {
- ApplicationHistoryData historyData = getApplication(appId);
- if (historyData != null) {
- historyDataMap.put(appId, historyData);
- }
- } catch (IOException e) {
- // Swallow the exception so that retrieval of the next
- // ApplicationHistoryData is not disturbed
- LOG.error("History information of application " + appId
- + " is not included into the result due to the exception", e);
- }
- }
- return historyDataMap;
- }
-
- @Override
- public Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
- getApplicationAttempts(ApplicationId appId) throws IOException {
- Map<ApplicationAttemptId, ApplicationAttemptHistoryData> historyDataMap =
- new HashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>();
- HistoryFileReader hfReader = getHistoryFileReader(appId);
- try {
- while (hfReader.hasNext()) {
- HistoryFileReader.Entry entry = hfReader.next();
- if (entry.key.id.startsWith(
- ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) {
- ApplicationAttemptId appAttemptId =
- ConverterUtils.toApplicationAttemptId(entry.key.id);
- if (appAttemptId.getApplicationId().equals(appId)) {
- ApplicationAttemptHistoryData historyData =
- historyDataMap.get(appAttemptId);
- if (historyData == null) {
- historyData = ApplicationAttemptHistoryData.newInstance(
- appAttemptId, null, -1, null, null, null,
- FinalApplicationStatus.UNDEFINED, null);
- historyDataMap.put(appAttemptId, historyData);
- }
- if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
- mergeApplicationAttemptHistoryData(historyData,
- parseApplicationAttemptStartData(entry.value));
- } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
- mergeApplicationAttemptHistoryData(historyData,
- parseApplicationAttemptFinishData(entry.value));
- }
- }
- }
- }
- LOG.info("Completed reading history information of all application"
- + " attempts of application " + appId);
- } catch (IOException e) {
- LOG.info("Error when reading history information of some application"
- + " attempts of application " + appId);
- } finally {
- hfReader.close();
- }
- return historyDataMap;
- }
-
- @Override
- public ApplicationAttemptHistoryData getApplicationAttempt(
- ApplicationAttemptId appAttemptId) throws IOException {
- HistoryFileReader hfReader =
- getHistoryFileReader(appAttemptId.getApplicationId());
- try {
- boolean readStartData = false;
- boolean readFinishData = false;
- ApplicationAttemptHistoryData historyData =
- ApplicationAttemptHistoryData.newInstance(appAttemptId, null, -1,
- null, null, null, FinalApplicationStatus.UNDEFINED, null);
- while ((!readStartData || !readFinishData) && hfReader.hasNext()) {
- HistoryFileReader.Entry entry = hfReader.next();
- if (entry.key.id.equals(appAttemptId.toString())) {
- if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
- ApplicationAttemptStartData startData =
- parseApplicationAttemptStartData(entry.value);
- mergeApplicationAttemptHistoryData(historyData, startData);
- readStartData = true;
- } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
- ApplicationAttemptFinishData finishData =
- parseApplicationAttemptFinishData(entry.value);
- mergeApplicationAttemptHistoryData(historyData, finishData);
- readFinishData = true;
- }
- }
- }
- if (!readStartData && !readFinishData) {
- return null;
- }
- if (!readStartData) {
- LOG.warn("Start information is missing for application attempt "
- + appAttemptId);
- }
- if (!readFinishData) {
- LOG.warn("Finish information is missing for application attempt "
- + appAttemptId);
- }
- LOG.info("Completed reading history information of application attempt "
- + appAttemptId);
- return historyData;
- } catch (IOException e) {
- LOG.error("Error when reading history file of application attempt"
- + appAttemptId);
- throw e;
- } finally {
- hfReader.close();
- }
- }
-
- @Override
- public ContainerHistoryData getContainer(ContainerId containerId)
- throws IOException {
- HistoryFileReader hfReader =
- getHistoryFileReader(containerId.getApplicationAttemptId()
- .getApplicationId());
- try {
- boolean readStartData = false;
- boolean readFinishData = false;
- ContainerHistoryData historyData =
- ContainerHistoryData
- .newInstance(containerId, null, null, null, Long.MIN_VALUE,
- Long.MAX_VALUE, null, Integer.MAX_VALUE, null);
- while ((!readStartData || !readFinishData) && hfReader.hasNext()) {
- HistoryFileReader.Entry entry = hfReader.next();
- if (entry.key.id.equals(containerId.toString())) {
- if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
- ContainerStartData startData = parseContainerStartData(entry.value);
- mergeContainerHistoryData(historyData, startData);
- readStartData = true;
- } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
- ContainerFinishData finishData =
- parseContainerFinishData(entry.value);
- mergeContainerHistoryData(historyData, finishData);
- readFinishData = true;
- }
- }
- }
- if (!readStartData && !readFinishData) {
- return null;
- }
- if (!readStartData) {
- LOG.warn("Start information is missing for container " + containerId);
- }
- if (!readFinishData) {
- LOG.warn("Finish information is missing for container " + containerId);
- }
- LOG.info("Completed reading history information of container "
- + containerId);
- return historyData;
- } catch (IOException e) {
- LOG.error("Error when reading history file of container " + containerId);
- throw e;
- } finally {
- hfReader.close();
- }
- }
-
- @Override
- public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId)
- throws IOException {
- ApplicationAttemptHistoryData attemptHistoryData =
- getApplicationAttempt(appAttemptId);
- if (attemptHistoryData == null
- || attemptHistoryData.getMasterContainerId() == null) {
- return null;
- }
- return getContainer(attemptHistoryData.getMasterContainerId());
- }
-
- @Override
- public Map<ContainerId, ContainerHistoryData> getContainers(
- ApplicationAttemptId appAttemptId) throws IOException {
- Map<ContainerId, ContainerHistoryData> historyDataMap =
- new HashMap<ContainerId, ContainerHistoryData>();
- HistoryFileReader hfReader =
- getHistoryFileReader(appAttemptId.getApplicationId());
- try {
- while (hfReader.hasNext()) {
- HistoryFileReader.Entry entry = hfReader.next();
- if (entry.key.id.startsWith(ConverterUtils.CONTAINER_PREFIX)) {
- ContainerId containerId =
- ConverterUtils.toContainerId(entry.key.id);
- if (containerId.getApplicationAttemptId().equals(appAttemptId)) {
- ContainerHistoryData historyData =
- historyDataMap.get(containerId);
- if (historyData == null) {
- historyData = ContainerHistoryData.newInstance(
- containerId, null, null, null, Long.MIN_VALUE,
- Long.MAX_VALUE, null, Integer.MAX_VALUE, null);
- historyDataMap.put(containerId, historyData);
- }
- if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
- mergeContainerHistoryData(historyData,
- parseContainerStartData(entry.value));
- } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
- mergeContainerHistoryData(historyData,
- parseContainerFinishData(entry.value));
- }
- }
- }
- }
- LOG.info("Completed reading history information of all conatiners"
- + " of application attempt " + appAttemptId);
- } catch (IOException e) {
- LOG.info("Error when reading history information of some containers"
- + " of application attempt " + appAttemptId);
- } finally {
- hfReader.close();
- }
- return historyDataMap;
- }
-
- @Override
- public void applicationStarted(ApplicationStartData appStart)
- throws IOException {
- HistoryFileWriter hfWriter =
- outstandingWriters.get(appStart.getApplicationId());
- if (hfWriter == null) {
- Path applicationHistoryFile =
- new Path(rootDirPath, appStart.getApplicationId().toString());
- try {
- hfWriter = new HistoryFileWriter(applicationHistoryFile);
- LOG.info("Opened history file of application "
- + appStart.getApplicationId());
- } catch (IOException e) {
- LOG.error("Error when openning history file of application "
- + appStart.getApplicationId());
- throw e;
- }
- outstandingWriters.put(appStart.getApplicationId(), hfWriter);
- } else {
- throw new IOException("History file of application "
- + appStart.getApplicationId() + " is already opened");
- }
- assert appStart instanceof ApplicationStartDataPBImpl;
- try {
- hfWriter.writeHistoryData(new HistoryDataKey(appStart.getApplicationId()
- .toString(), START_DATA_SUFFIX),
- ((ApplicationStartDataPBImpl) appStart).getProto().toByteArray());
- LOG.info("Start information of application "
- + appStart.getApplicationId() + " is written");
- } catch (IOException e) {
- LOG.error("Error when writing start information of application "
- + appStart.getApplicationId());
- throw e;
- }
- }
-
- @Override
- public void applicationFinished(ApplicationFinishData appFinish)
- throws IOException {
- HistoryFileWriter hfWriter =
- getHistoryFileWriter(appFinish.getApplicationId());
- assert appFinish instanceof ApplicationFinishDataPBImpl;
- try {
- hfWriter.writeHistoryData(new HistoryDataKey(appFinish.getApplicationId()
- .toString(), FINISH_DATA_SUFFIX),
- ((ApplicationFinishDataPBImpl) appFinish).getProto().toByteArray());
- LOG.info("Finish information of application "
- + appFinish.getApplicationId() + " is written");
- } catch (IOException e) {
- LOG.error("Error when writing finish information of application "
- + appFinish.getApplicationId());
- throw e;
- } finally {
- hfWriter.close();
- outstandingWriters.remove(appFinish.getApplicationId());
- }
- }
-
- @Override
- public void applicationAttemptStarted(
- ApplicationAttemptStartData appAttemptStart) throws IOException {
- HistoryFileWriter hfWriter =
- getHistoryFileWriter(appAttemptStart.getApplicationAttemptId()
- .getApplicationId());
- assert appAttemptStart instanceof ApplicationAttemptStartDataPBImpl;
- try {
- hfWriter.writeHistoryData(new HistoryDataKey(appAttemptStart
- .getApplicationAttemptId().toString(), START_DATA_SUFFIX),
- ((ApplicationAttemptStartDataPBImpl) appAttemptStart).getProto()
- .toByteArray());
- LOG.info("Start information of application attempt "
- + appAttemptStart.getApplicationAttemptId() + " is written");
- } catch (IOException e) {
- LOG.error("Error when writing start information of application attempt "
- + appAttemptStart.getApplicationAttemptId());
- throw e;
- }
- }
-
- @Override
- public void applicationAttemptFinished(
- ApplicationAttemptFinishData appAttemptFinish) throws IOException {
- HistoryFileWriter hfWriter =
- getHistoryFileWriter(appAttemptFinish.getApplicationAttemptId()
- .getApplicationId());
- assert appAttemptFinish instanceof ApplicationAttemptFinishDataPBImpl;
- try {
- hfWriter.writeHistoryData(new HistoryDataKey(appAttemptFinish
- .getApplicationAttemptId().toString(), FINISH_DATA_SUFFIX),
- ((ApplicationAttemptFinishDataPBImpl) appAttemptFinish).getProto()
- .toByteArray());
- LOG.info("Finish information of application attempt "
- + appAttemptFinish.getApplicationAttemptId() + " is written");
- } catch (IOException e) {
- LOG.error("Error when writing finish information of application attempt "
- + appAttemptFinish.getApplicationAttemptId());
- throw e;
- }
- }
-
- @Override
- public void containerStarted(ContainerStartData containerStart)
- throws IOException {
- HistoryFileWriter hfWriter =
- getHistoryFileWriter(containerStart.getContainerId()
- .getApplicationAttemptId().getApplicationId());
- assert containerStart instanceof ContainerStartDataPBImpl;
- try {
- hfWriter.writeHistoryData(new HistoryDataKey(containerStart
- .getContainerId().toString(), START_DATA_SUFFIX),
- ((ContainerStartDataPBImpl) containerStart).getProto().toByteArray());
- LOG.info("Start information of container "
- + containerStart.getContainerId() + " is written");
- } catch (IOException e) {
- LOG.error("Error when writing start information of container "
- + containerStart.getContainerId());
- throw e;
- }
- }
-
- @Override
- public void containerFinished(ContainerFinishData containerFinish)
- throws IOException {
- HistoryFileWriter hfWriter =
- getHistoryFileWriter(containerFinish.getContainerId()
- .getApplicationAttemptId().getApplicationId());
- assert containerFinish instanceof ContainerFinishDataPBImpl;
- try {
- hfWriter.writeHistoryData(new HistoryDataKey(containerFinish
- .getContainerId().toString(), FINISH_DATA_SUFFIX),
- ((ContainerFinishDataPBImpl) containerFinish).getProto().toByteArray());
- LOG.info("Finish information of container "
- + containerFinish.getContainerId() + " is written");
- } catch (IOException e) {
- LOG.error("Error when writing finish information of container "
- + containerFinish.getContainerId());
- }
- }
-
- private static ApplicationStartData parseApplicationStartData(byte[] value)
- throws InvalidProtocolBufferException {
- return new ApplicationStartDataPBImpl(
- ApplicationStartDataProto.parseFrom(value));
- }
-
- private static ApplicationFinishData parseApplicationFinishData(byte[] value)
- throws InvalidProtocolBufferException {
- return new ApplicationFinishDataPBImpl(
- ApplicationFinishDataProto.parseFrom(value));
- }
-
- private static ApplicationAttemptStartData parseApplicationAttemptStartData(
- byte[] value) throws InvalidProtocolBufferException {
- return new ApplicationAttemptStartDataPBImpl(
- ApplicationAttemptStartDataProto.parseFrom(value));
- }
-
- private static ApplicationAttemptFinishData
- parseApplicationAttemptFinishData(byte[] value)
- throws InvalidProtocolBufferException {
- return new ApplicationAttemptFinishDataPBImpl(
- ApplicationAttemptFinishDataProto.parseFrom(value));
- }
-
- private static ContainerStartData parseContainerStartData(byte[] value)
- throws InvalidProtocolBufferException {
- return new ContainerStartDataPBImpl(
- ContainerStartDataProto.parseFrom(value));
- }
-
- private static ContainerFinishData parseContainerFinishData(byte[] value)
- throws InvalidProtocolBufferException {
- return new ContainerFinishDataPBImpl(
- ContainerFinishDataProto.parseFrom(value));
- }
-
- private static void mergeApplicationHistoryData(
- ApplicationHistoryData historyData, ApplicationStartData startData) {
- historyData.setApplicationName(startData.getApplicationName());
- historyData.setApplicationType(startData.getApplicationType());
- historyData.setQueue(startData.getQueue());
- historyData.setUser(startData.getUser());
- historyData.setSubmitTime(startData.getSubmitTime());
- historyData.setStartTime(startData.getStartTime());
- }
-
- private static void mergeApplicationHistoryData(
- ApplicationHistoryData historyData, ApplicationFinishData finishData) {
- historyData.setFinishTime(finishData.getFinishTime());
- historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo());
- historyData.setFinalApplicationStatus(finishData
- .getFinalApplicationStatus());
- historyData.setYarnApplicationState(finishData.getYarnApplicationState());
- }
-
- private static void mergeApplicationAttemptHistoryData(
- ApplicationAttemptHistoryData historyData,
- ApplicationAttemptStartData startData) {
- historyData.setHost(startData.getHost());
- historyData.setRPCPort(startData.getRPCPort());
- historyData.setMasterContainerId(startData.getMasterContainerId());
- }
-
- private static void mergeApplicationAttemptHistoryData(
- ApplicationAttemptHistoryData historyData,
- ApplicationAttemptFinishData finishData) {
- historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo());
- historyData.setTrackingURL(finishData.getTrackingURL());
- historyData.setFinalApplicationStatus(finishData
- .getFinalApplicationStatus());
- historyData.setYarnApplicationAttemptState(finishData
- .getYarnApplicationAttemptState());
- }
-
- private static void mergeContainerHistoryData(
- ContainerHistoryData historyData, ContainerStartData startData) {
- historyData.setAllocatedResource(startData.getAllocatedResource());
- historyData.setAssignedNode(startData.getAssignedNode());
- historyData.setPriority(startData.getPriority());
- historyData.setStartTime(startData.getStartTime());
- }
-
- private static void mergeContainerHistoryData(
- ContainerHistoryData historyData, ContainerFinishData finishData) {
- historyData.setFinishTime(finishData.getFinishTime());
- historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo());
- historyData.setContainerExitStatus(finishData.getContainerExitStatus());
- historyData.setContainerState(finishData.getContainerState());
- }
-
- private HistoryFileWriter getHistoryFileWriter(ApplicationId appId)
- throws IOException {
- HistoryFileWriter hfWriter = outstandingWriters.get(appId);
- if (hfWriter == null) {
- throw new IOException("History file of application " + appId
- + " is not opened");
- }
- return hfWriter;
- }
-
- private HistoryFileReader getHistoryFileReader(ApplicationId appId)
- throws IOException {
- Path applicationHistoryFile = new Path(rootDirPath, appId.toString());
- if (!fs.exists(applicationHistoryFile)) {
- throw new IOException("History file for application " + appId
- + " is not found");
- }
- // The history file is still being written
- if (outstandingWriters.containsKey(appId)) {
- throw new IOException("History file for application " + appId
- + " is still being written");
- }
- return new HistoryFileReader(applicationHistoryFile);
- }
-
- private class HistoryFileReader {
-
- private class Entry {
-
- private HistoryDataKey key;
- private byte[] value;
-
- public Entry(HistoryDataKey key, byte[] value) {
- this.key = key;
- this.value = value;
- }
- }
-
- private TFile.Reader reader;
- private TFile.Reader.Scanner scanner;
-
- public HistoryFileReader(Path historyFile) throws IOException {
- FSDataInputStream fsdis = fs.open(historyFile);
- reader =
- new TFile.Reader(fsdis, fs.getFileStatus(historyFile).getLen(),
- getConfig());
- reset();
- }
-
- public boolean hasNext() {
- return !scanner.atEnd();
- }
-
- public Entry next() throws IOException {
- TFile.Reader.Scanner.Entry entry = scanner.entry();
- DataInputStream dis = entry.getKeyStream();
- HistoryDataKey key = new HistoryDataKey();
- key.readFields(dis);
- dis = entry.getValueStream();
- byte[] value = new byte[entry.getValueLength()];
- dis.read(value);
- scanner.advance();
- return new Entry(key, value);
- }
-
- public void reset() throws IOException {
- IOUtils.cleanup(LOG, scanner);
- scanner = reader.createScanner();
- }
-
- public void close() {
- IOUtils.cleanup(LOG, scanner, reader);
- }
-
- }
-
- private class HistoryFileWriter {
-
- private FSDataOutputStream fsdos;
- private TFile.Writer writer;
-
- public HistoryFileWriter(Path historyFile) throws IOException {
- if (fs.exists(historyFile)) {
- fsdos = fs.append(historyFile);
- } else {
- fsdos = fs.create(historyFile);
- }
- fs.setPermission(historyFile, HISTORY_FILE_UMASK);
- writer =
- new TFile.Writer(fsdos, MIN_BLOCK_SIZE, getConfig().get(
- YarnConfiguration.FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE,
- YarnConfiguration.DEFAULT_FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE), null,
- getConfig());
- }
-
- public synchronized void close() {
- IOUtils.cleanup(LOG, writer, fsdos);
- }
-
- public synchronized void writeHistoryData(HistoryDataKey key, byte[] value)
- throws IOException {
- DataOutputStream dos = null;
- try {
- dos = writer.prepareAppendKey(-1);
- key.write(dos);
- } finally {
- IOUtils.cleanup(LOG, dos);
- }
- try {
- dos = writer.prepareAppendValue(value.length);
- dos.write(value);
- } finally {
- IOUtils.cleanup(LOG, dos);
- }
- }
-
- }
-
- private static class HistoryDataKey implements Writable {
-
- private String id;
-
- private String suffix;
-
- public HistoryDataKey() {
- this(null, null);
- }
-
- public HistoryDataKey(String id, String suffix) {
- this.id = id;
- this.suffix = suffix;
- }
-
- @Override
- public void write(DataOutput out) throws IOException {
- out.writeUTF(id);
- out.writeUTF(suffix);
- }
-
- @Override
- public void readFields(DataInput in) throws IOException {
- id = in.readUTF();
- suffix = in.readUTF();
- }
- }
-}
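
For reference, the on-disk layout of the removed store was simple: one TFile
per application, whose record keys are an entity id plus a "_start" or
"_finish" suffix (see HistoryDataKey.write above) and whose values are the
protobuf-serialized start/finish records. A JDK-only sketch of the key
encoding, with an illustrative application id:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class HistoryKeyLayoutSketch {
      // Encodes a key the way HistoryDataKey.write did: two writeUTF fields.
      static byte[] encodeKey(String entityId, String suffix) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (DataOutputStream dos = new DataOutputStream(bos)) {
          dos.writeUTF(entityId); // e.g. "application_1513890000000_0001"
          dos.writeUTF(suffix);   // START_DATA_SUFFIX or FINISH_DATA_SUFFIX
        }
        return bos.toByteArray();
      }

      public static void main(String[] args) throws IOException {
        byte[] key = encodeKey("application_1513890000000_0001", "_start");
        System.out.println("encoded key length = " + key.length);
      }
    }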
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java
deleted file mode 100644
index c226ad3..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java
+++ /dev/null
@@ -1,274 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
-
-/**
- * In-memory implementation of {@link ApplicationHistoryStore}. This
- * implementation is for test purposes only. If it is instantiated more than
- * once, history data may be written to and read from different in-memory
- * stores.
- *
- */
-@Private
-@Unstable
-public class MemoryApplicationHistoryStore extends AbstractService implements
- ApplicationHistoryStore {
-
- private final ConcurrentMap<ApplicationId, ApplicationHistoryData> applicationData =
- new ConcurrentHashMap<ApplicationId, ApplicationHistoryData>();
- private final ConcurrentMap<ApplicationId, ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData>> applicationAttemptData =
- new ConcurrentHashMap<ApplicationId, ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData>>();
- private final ConcurrentMap<ApplicationAttemptId, ConcurrentMap<ContainerId, ContainerHistoryData>> containerData =
- new ConcurrentHashMap<ApplicationAttemptId, ConcurrentMap<ContainerId, ContainerHistoryData>>();
-
- public MemoryApplicationHistoryStore() {
- super(MemoryApplicationHistoryStore.class.getName());
- }
-
- @Override
- public Map<ApplicationId, ApplicationHistoryData> getAllApplications() {
- return new HashMap<ApplicationId, ApplicationHistoryData>(applicationData);
- }
-
- @Override
- public ApplicationHistoryData getApplication(ApplicationId appId) {
- return applicationData.get(appId);
- }
-
- @Override
- public Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
- getApplicationAttempts(ApplicationId appId) {
- ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
- applicationAttemptData.get(appId);
- if (subMap == null) {
- return Collections
- .<ApplicationAttemptId, ApplicationAttemptHistoryData> emptyMap();
- } else {
- return new HashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>(
- subMap);
- }
- }
-
- @Override
- public ApplicationAttemptHistoryData getApplicationAttempt(
- ApplicationAttemptId appAttemptId) {
- ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
- applicationAttemptData.get(appAttemptId.getApplicationId());
- if (subMap == null) {
- return null;
- } else {
- return subMap.get(appAttemptId);
- }
- }
-
- @Override
- public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) {
- ApplicationAttemptHistoryData appAttempt =
- getApplicationAttempt(appAttemptId);
- if (appAttempt == null || appAttempt.getMasterContainerId() == null) {
- return null;
- } else {
- return getContainer(appAttempt.getMasterContainerId());
- }
- }
-
- @Override
- public ContainerHistoryData getContainer(ContainerId containerId) {
- Map<ContainerId, ContainerHistoryData> subMap =
- containerData.get(containerId.getApplicationAttemptId());
- if (subMap == null) {
- return null;
- } else {
- return subMap.get(containerId);
- }
- }
-
- @Override
- public Map<ContainerId, ContainerHistoryData> getContainers(
- ApplicationAttemptId appAttemptId) throws IOException {
- ConcurrentMap<ContainerId, ContainerHistoryData> subMap =
- containerData.get(appAttemptId);
- if (subMap == null) {
- return Collections.<ContainerId, ContainerHistoryData> emptyMap();
- } else {
- return new HashMap<ContainerId, ContainerHistoryData>(subMap);
- }
- }
-
- @Override
- public void applicationStarted(ApplicationStartData appStart)
- throws IOException {
- ApplicationHistoryData oldData =
- applicationData.putIfAbsent(appStart.getApplicationId(),
- ApplicationHistoryData.newInstance(appStart.getApplicationId(),
- appStart.getApplicationName(), appStart.getApplicationType(),
- appStart.getQueue(), appStart.getUser(), appStart.getSubmitTime(),
- appStart.getStartTime(), Long.MAX_VALUE, null, null, null));
- if (oldData != null) {
- throw new IOException("The start information of application "
- + appStart.getApplicationId() + " is already stored.");
- }
- }
-
- @Override
- public void applicationFinished(ApplicationFinishData appFinish)
- throws IOException {
- ApplicationHistoryData data =
- applicationData.get(appFinish.getApplicationId());
- if (data == null) {
- throw new IOException("The finish information of application "
- + appFinish.getApplicationId() + " is stored before the start"
- + " information.");
- }
- // Make the assumption that YarnApplicationState should not be null if
- // the finish information is already recorded
- if (data.getYarnApplicationState() != null) {
- throw new IOException("The finish information of application "
- + appFinish.getApplicationId() + " is already stored.");
- }
- data.setFinishTime(appFinish.getFinishTime());
- data.setDiagnosticsInfo(appFinish.getDiagnosticsInfo());
- data.setFinalApplicationStatus(appFinish.getFinalApplicationStatus());
- data.setYarnApplicationState(appFinish.getYarnApplicationState());
- }
-
- @Override
- public void applicationAttemptStarted(
- ApplicationAttemptStartData appAttemptStart) throws IOException {
- ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
- getSubMap(appAttemptStart.getApplicationAttemptId().getApplicationId());
- ApplicationAttemptHistoryData oldData =
- subMap.putIfAbsent(appAttemptStart.getApplicationAttemptId(),
- ApplicationAttemptHistoryData.newInstance(
- appAttemptStart.getApplicationAttemptId(),
- appAttemptStart.getHost(), appAttemptStart.getRPCPort(),
- appAttemptStart.getMasterContainerId(), null, null, null, null));
- if (oldData != null) {
- throw new IOException("The start information of application attempt "
- + appAttemptStart.getApplicationAttemptId() + " is already stored.");
- }
- }
-
- @Override
- public void applicationAttemptFinished(
- ApplicationAttemptFinishData appAttemptFinish) throws IOException {
- ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
- getSubMap(appAttemptFinish.getApplicationAttemptId().getApplicationId());
- ApplicationAttemptHistoryData data =
- subMap.get(appAttemptFinish.getApplicationAttemptId());
- if (data == null) {
- throw new IOException("The finish information of application attempt "
- + appAttemptFinish.getApplicationAttemptId() + " is stored before"
- + " the start information.");
- }
- // Make the assumption that YarnApplicationAttemptState should not be null
- // if the finish information is already recorded
- if (data.getYarnApplicationAttemptState() != null) {
- throw new IOException("The finish information of application attempt "
- + appAttemptFinish.getApplicationAttemptId() + " is already stored.");
- }
- data.setTrackingURL(appAttemptFinish.getTrackingURL());
- data.setDiagnosticsInfo(appAttemptFinish.getDiagnosticsInfo());
- data
- .setFinalApplicationStatus(appAttemptFinish.getFinalApplicationStatus());
- data.setYarnApplicationAttemptState(appAttemptFinish
- .getYarnApplicationAttemptState());
- }
-
- private ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData>
- getSubMap(ApplicationId appId) {
- applicationAttemptData
- .putIfAbsent(
- appId,
- new ConcurrentHashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>());
- return applicationAttemptData.get(appId);
- }
-
- @Override
- public void containerStarted(ContainerStartData containerStart)
- throws IOException {
- ConcurrentMap<ContainerId, ContainerHistoryData> subMap =
- getSubMap(containerStart.getContainerId().getApplicationAttemptId());
- ContainerHistoryData oldData =
- subMap.putIfAbsent(containerStart.getContainerId(),
- ContainerHistoryData.newInstance(containerStart.getContainerId(),
- containerStart.getAllocatedResource(),
- containerStart.getAssignedNode(), containerStart.getPriority(),
- containerStart.getStartTime(), Long.MAX_VALUE, null,
- Integer.MAX_VALUE, null));
- if (oldData != null) {
- throw new IOException("The start information of container "
- + containerStart.getContainerId() + " is already stored.");
- }
- }
-
- @Override
- public void containerFinished(ContainerFinishData containerFinish)
- throws IOException {
- ConcurrentMap<ContainerId, ContainerHistoryData> subMap =
- getSubMap(containerFinish.getContainerId().getApplicationAttemptId());
- ContainerHistoryData data = subMap.get(containerFinish.getContainerId());
- if (data == null) {
- throw new IOException("The finish information of container "
- + containerFinish.getContainerId() + " is stored before"
- + " the start information.");
- }
- // Make the assumption that ContainerState should not be null if
- // the finish information is already recorded
- if (data.getContainerState() != null) {
- throw new IOException("The finish information of container "
- + containerFinish.getContainerId() + " is already stored.");
- }
- data.setFinishTime(containerFinish.getFinishTime());
- data.setDiagnosticsInfo(containerFinish.getDiagnosticsInfo());
- data.setContainerExitStatus(containerFinish.getContainerExitStatus());
- data.setContainerState(containerFinish.getContainerState());
- }
-
- private ConcurrentMap<ContainerId, ContainerHistoryData> getSubMap(
- ApplicationAttemptId appAttemptId) {
- containerData.putIfAbsent(appAttemptId,
- new ConcurrentHashMap<ContainerId, ContainerHistoryData>());
- return containerData.get(appAttemptId);
- }
-
-}
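
The removed in-memory store enforced write ordering with
ConcurrentMap.putIfAbsent: inserting a start record and detecting a duplicate
start are a single atomic step, and a finish that arrives before its start
simply finds no entry. A minimal sketch of that idiom (simplified to String
values; the real store kept full history records):

    import java.io.IOException;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class StartFinishOrderSketch {
      private final ConcurrentMap<String, String> data = new ConcurrentHashMap<>();

      void started(String id) throws IOException {
        // Atomic insert-or-detect-duplicate, as in applicationStarted() above.
        if (data.putIfAbsent(id, "STARTED") != null) {
          throw new IOException("start information of " + id + " is already stored");
        }
      }

      void finished(String id) throws IOException {
        // Finish before start is a caller error, as in applicationFinished().
        if (data.get(id) == null) {
          throw new IOException("finish information of " + id
              + " is stored before the start information");
        }
        data.put(id, "FINISHED");
      }
    }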
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/NullApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/NullApplicationHistoryStore.java
deleted file mode 100644
index 3660c10..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/NullApplicationHistoryStore.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.Map;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
-
-/**
- * Dummy implementation of {@link ApplicationHistoryStore}. If this
- * implementation is used, no history data will be persisted.
- *
- */
-@Unstable
-@Private
-public class NullApplicationHistoryStore extends AbstractService implements
- ApplicationHistoryStore {
-
- public NullApplicationHistoryStore() {
- super(NullApplicationHistoryStore.class.getName());
- }
-
- @Override
- public void applicationStarted(ApplicationStartData appStart)
- throws IOException {
- }
-
- @Override
- public void applicationFinished(ApplicationFinishData appFinish)
- throws IOException {
- }
-
- @Override
- public void applicationAttemptStarted(
- ApplicationAttemptStartData appAttemptStart) throws IOException {
- }
-
- @Override
- public void applicationAttemptFinished(
- ApplicationAttemptFinishData appAttemptFinish) throws IOException {
- }
-
- @Override
- public void containerStarted(ContainerStartData containerStart)
- throws IOException {
- }
-
- @Override
- public void containerFinished(ContainerFinishData containerFinish)
- throws IOException {
- }
-
- @Override
- public ApplicationHistoryData getApplication(ApplicationId appId)
- throws IOException {
- return null;
- }
-
- @Override
- public Map<ApplicationId, ApplicationHistoryData> getAllApplications()
- throws IOException {
- return Collections.emptyMap();
- }
-
- @Override
- public Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
- getApplicationAttempts(ApplicationId appId) throws IOException {
- return Collections.emptyMap();
- }
-
- @Override
- public ApplicationAttemptHistoryData getApplicationAttempt(
- ApplicationAttemptId appAttemptId) throws IOException {
- return null;
- }
-
- @Override
- public ContainerHistoryData getContainer(ContainerId containerId)
- throws IOException {
- return null;
- }
-
- @Override
- public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId)
- throws IOException {
- return null;
- }
-
- @Override
- public Map<ContainerId, ContainerHistoryData> getContainers(
- ApplicationAttemptId appAttemptId) throws IOException {
- return Collections.emptyMap();
- }
-
-}
\ No newline at end of file
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index 0626e8e..fc26f5d 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -50,11 +50,11 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.ALTER_METRICS_METADATA_TABLE;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.ANOMALY_METRICS_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CONTAINER_METRICS_TABLE_NAME;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_ANOMALY_METRICS_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_CONTAINER_METRICS_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_HOSTED_APPS_METADATA_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_INSTANCE_HOST_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_AGGREGATE_TABLE_SQL;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_ANOMALY_METRICS_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_GROUPED_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.CREATE_METRICS_METADATA_TABLE_SQL;
@@ -74,6 +74,7 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_CLUSTER_AGGREGATE_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_RECORD_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.PHOENIX_TABLES;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.PHOENIX_TABLES_REGEX_PATTERN;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.TREND_ANOMALY_METRICS_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_AGGREGATE_RECORD_SQL;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_ANOMALY_METRICS_SQL;
@@ -103,6 +104,7 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ArrayBlockingQueue;
@@ -116,10 +118,14 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.RetryCounter;
import org.apache.hadoop.hbase.util.RetryCounterFactory;
import org.apache.hadoop.metrics2.sink.timeline.ContainerMetric;
@@ -153,7 +159,6 @@ import org.apache.phoenix.exception.PhoenixIOException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
-import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
@@ -208,7 +213,7 @@ public class PhoenixHBaseAccessor {
static final String BLOCKING_STORE_FILES_KEY =
"hbase.hstore.blockingStoreFiles";
- private HashMap<String, String> tableTTL = new HashMap<>();
+ private Map<String, Integer> tableTTL = new HashMap<>();
private final TimelineMetricConfiguration configuration;
private List<InternalMetricsSource> rawMetricsSources = new ArrayList<>();
@@ -253,15 +258,15 @@ public class PhoenixHBaseAccessor {
this.timelineMetricsTablesDurability = metricsConf.get(TIMELINE_METRICS_AGGREGATE_TABLES_DURABILITY, "");
this.timelineMetricsPrecisionTableDurability = metricsConf.get(TIMELINE_METRICS_PRECISION_TABLE_DURABILITY, "");
- tableTTL.put(METRICS_RECORD_TABLE_NAME, metricsConf.get(PRECISION_TABLE_TTL, String.valueOf(1 * 86400))); // 1 day
- tableTTL.put(CONTAINER_METRICS_TABLE_NAME, metricsConf.get(CONTAINER_METRICS_TTL, String.valueOf(30 * 86400))); // 30 days
- tableTTL.put(METRICS_AGGREGATE_MINUTE_TABLE_NAME, metricsConf.get(HOST_MINUTE_TABLE_TTL, String.valueOf(7 * 86400))); //7 days
- tableTTL.put(METRICS_AGGREGATE_HOURLY_TABLE_NAME, metricsConf.get(HOST_HOUR_TABLE_TTL, String.valueOf(30 * 86400))); //30 days
- tableTTL.put(METRICS_AGGREGATE_DAILY_TABLE_NAME, metricsConf.get(HOST_DAILY_TABLE_TTL, String.valueOf(365 * 86400))); //1 year
- tableTTL.put(METRICS_CLUSTER_AGGREGATE_TABLE_NAME, metricsConf.get(CLUSTER_SECOND_TABLE_TTL, String.valueOf(7 * 86400))); //7 days
- tableTTL.put(METRICS_CLUSTER_AGGREGATE_MINUTE_TABLE_NAME, metricsConf.get(CLUSTER_MINUTE_TABLE_TTL, String.valueOf(30 * 86400))); //30 days
- tableTTL.put(METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_NAME, metricsConf.get(CLUSTER_HOUR_TABLE_TTL, String.valueOf(365 * 86400))); //1 year
- tableTTL.put(METRICS_CLUSTER_AGGREGATE_DAILY_TABLE_NAME, metricsConf.get(CLUSTER_DAILY_TABLE_TTL, String.valueOf(730 * 86400))); //2 years
+ tableTTL.put(METRICS_RECORD_TABLE_NAME, metricsConf.getInt(PRECISION_TABLE_TTL, 1 * 86400)); // 1 day
+ tableTTL.put(CONTAINER_METRICS_TABLE_NAME, metricsConf.getInt(CONTAINER_METRICS_TTL, 30 * 86400)); // 30 days
+ tableTTL.put(METRICS_AGGREGATE_MINUTE_TABLE_NAME, metricsConf.getInt(HOST_MINUTE_TABLE_TTL, 7 * 86400)); //7 days
+ tableTTL.put(METRICS_AGGREGATE_HOURLY_TABLE_NAME, metricsConf.getInt(HOST_HOUR_TABLE_TTL, 30 * 86400)); //30 days
+ tableTTL.put(METRICS_AGGREGATE_DAILY_TABLE_NAME, metricsConf.getInt(HOST_DAILY_TABLE_TTL, 365 * 86400)); //1 year
+ tableTTL.put(METRICS_CLUSTER_AGGREGATE_TABLE_NAME, metricsConf.getInt(CLUSTER_SECOND_TABLE_TTL, 7 * 86400)); //7 days
+ tableTTL.put(METRICS_CLUSTER_AGGREGATE_MINUTE_TABLE_NAME, metricsConf.getInt(CLUSTER_MINUTE_TABLE_TTL, 30 * 86400)); //30 days
+ tableTTL.put(METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_NAME, metricsConf.getInt(CLUSTER_HOUR_TABLE_TTL, 365 * 86400)); //1 year
+ tableTTL.put(METRICS_CLUSTER_AGGREGATE_DAILY_TABLE_NAME, metricsConf.getInt(CLUSTER_DAILY_TABLE_TTL, 730 * 86400)); //2 years
if (cacheEnabled) {
LOG.debug("Initialising and starting metrics cache committer thread...");
@@ -495,7 +500,7 @@ public class PhoenixHBaseAccessor {
* @return @HBaseAdmin
* @throws IOException
*/
- HBaseAdmin getHBaseAdmin() throws IOException {
+ Admin getHBaseAdmin() throws IOException {
return dataSource.getHBaseAdmin();
}
@@ -612,55 +617,85 @@ public class PhoenixHBaseAccessor {
}
protected void initPoliciesAndTTL() {
-
- HBaseAdmin hBaseAdmin = null;
+ Admin hBaseAdmin = null;
try {
hBaseAdmin = dataSource.getHBaseAdmin();
} catch (IOException e) {
LOG.warn("Unable to initialize HBaseAdmin for setting policies.", e);
}
+ TableName[] tableNames = null;
if (hBaseAdmin != null) {
+ try {
+ tableNames = hBaseAdmin.listTableNames(PHOENIX_TABLES_REGEX_PATTERN, false);
+ } catch (IOException e) {
+ LOG.warn("Unable to get table names from HBaseAdmin for setting policies.", e);
+ return;
+ }
+ if (tableNames == null || tableNames.length == 0) {
+ LOG.warn("Unable to get table names from HBaseAdmin for setting policies.");
+ return;
+ }
for (String tableName : PHOENIX_TABLES) {
try {
boolean modifyTable = false;
- HTableDescriptor tableDescriptor = hBaseAdmin.getTableDescriptor(tableName.getBytes());
+ Optional<TableName> tableNameOptional = Arrays.stream(tableNames)
+ .filter(t -> tableName.equals(t.getNameAsString())).findFirst();
+
+ TableDescriptor tableDescriptor = null;
+ if (tableNameOptional.isPresent()) {
+ tableDescriptor = hBaseAdmin.getTableDescriptor(tableNameOptional.get());
+ }
+
+ if (tableDescriptor == null) {
+ LOG.warn("Unable to get table descriptor for " + tableName);
+ continue;
+ }
+
+ // @TableDescriptor is immutable by design
+ TableDescriptorBuilder tableDescriptorBuilder =
+ TableDescriptorBuilder.newBuilder(tableDescriptor);
//Set normalizer preferences
boolean enableNormalizer = hbaseConf.getBoolean("hbase.normalizer.enabled", false);
if (enableNormalizer ^ tableDescriptor.isNormalizationEnabled()) {
- tableDescriptor.setNormalizationEnabled(enableNormalizer);
+ tableDescriptorBuilder.setNormalizationEnabled(enableNormalizer);
LOG.info("Normalizer set to " + enableNormalizer + " for " + tableName);
modifyTable = true;
}
//Set durability preferences
- boolean durabilitySettingsModified = setDurabilityForTable(tableName, tableDescriptor);
+ boolean durabilitySettingsModified = setDurabilityForTable(tableName, tableDescriptorBuilder);
modifyTable = modifyTable || durabilitySettingsModified;
//Set compaction policy preferences
boolean compactionPolicyModified = false;
- compactionPolicyModified = setCompactionPolicyForTable(tableName, tableDescriptor);
+ compactionPolicyModified = setCompactionPolicyForTable(tableName, tableDescriptorBuilder);
modifyTable = modifyTable || compactionPolicyModified;
// Change TTL setting to match user configuration
- HColumnDescriptor[] columnFamilies = tableDescriptor.getColumnFamilies();
- if (columnFamilies != null) {
- for (HColumnDescriptor family : columnFamilies) {
- String ttlValue = family.getValue("TTL");
- if (StringUtils.isEmpty(ttlValue) ||
- !ttlValue.trim().equals(tableTTL.get(tableName))) {
- family.setValue("TTL", tableTTL.get(tableName));
+ ColumnFamilyDescriptor[] columnFamilyDescriptors = tableDescriptor.getColumnFamilies();
+ if (columnFamilyDescriptors != null) {
+ for (ColumnFamilyDescriptor familyDescriptor : columnFamilyDescriptors) {
+ int ttlValue = familyDescriptor.getTimeToLive();
+ if (ttlValue != tableTTL.get(tableName)) {
+ ColumnFamilyDescriptorBuilder familyDescriptorBuilder =
+ ColumnFamilyDescriptorBuilder.newBuilder(familyDescriptor);
+
+ familyDescriptorBuilder.setTimeToLive(tableTTL.get(tableName));
+
LOG.info("Setting TTL on table: " + tableName + " to : " +
tableTTL.get(tableName) + " seconds.");
- modifyTable = true;
+
+ hBaseAdmin.modifyColumnFamily(tableNameOptional.get(), familyDescriptorBuilder.build());
+ // modifyTable = true;
}
}
}
// Persist only if anything changed
if (modifyTable) {
- hBaseAdmin.modifyTable(tableName.getBytes(), tableDescriptor);
+ hBaseAdmin.modifyTable(tableNameOptional.get(), tableDescriptorBuilder.build());
}
} catch (IOException e) {
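
This hunk is the core of the HBase 2.x migration: Admin replaces HBaseAdmin,
and descriptors fetched from the cluster are immutable, so every change is
staged on a TableDescriptorBuilder or ColumnFamilyDescriptorBuilder and
written back explicitly. A self-contained sketch of that round trip, with the
table name and TTL as illustrative values; getDescriptor is the
non-deprecated counterpart of the getTableDescriptor call used in the patch:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class DescriptorUpdateSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {
          TableName table = TableName.valueOf("METRIC_RECORD");
          TableDescriptor current = admin.getDescriptor(table); // immutable snapshot

          // Table-level change: rebuild the descriptor, then modifyTable.
          TableDescriptor updated = TableDescriptorBuilder.newBuilder(current)
              .setNormalizationEnabled(true)
              .build();
          admin.modifyTable(updated);

          // Column-family change: same builder pattern, applied per family.
          for (ColumnFamilyDescriptor family : current.getColumnFamilies()) {
            ColumnFamilyDescriptor withTtl = ColumnFamilyDescriptorBuilder
                .newBuilder(family)
                .setTimeToLive(86400) // one day, in seconds
                .build();
            admin.modifyColumnFamily(table, withTtl);
          }
        }
      }
    }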
@@ -675,10 +710,10 @@ public class PhoenixHBaseAccessor {
}
}
- private boolean setDurabilityForTable(String tableName, HTableDescriptor tableDescriptor) {
+ private boolean setDurabilityForTable(String tableName, TableDescriptorBuilder tableDescriptor) {
boolean modifyTable = false;
- //Set WAL preferences
+ // Set WAL preferences
if (METRICS_RECORD_TABLE_NAME.equals(tableName)) {
if (!timelineMetricsPrecisionTableDurability.isEmpty()) {
LOG.info("Setting WAL option " + timelineMetricsPrecisionTableDurability + " for table : " + tableName);
@@ -723,7 +758,9 @@ public class PhoenixHBaseAccessor {
return modifyTable;
}
- private boolean setCompactionPolicyForTable(String tableName, HTableDescriptor tableDescriptor) {
+ private boolean setCompactionPolicyForTable(String tableName, TableDescriptorBuilder tableDescriptorBuilder) {
+
+ boolean modifyTable = false;
String compactionPolicyKey = metricsConf.get(TIMELINE_METRICS_HBASE_AGGREGATE_TABLE_COMPACTION_POLICY_KEY,
HSTORE_ENGINE_CLASS);
@@ -738,38 +775,32 @@ public class PhoenixHBaseAccessor {
FIFO_COMPACTION_POLICY_CLASS);
blockingStoreFiles = hbaseConf.getInt(TIMELINE_METRICS_PRECISION_TABLE_HBASE_BLOCKING_STORE_FILES, 1000);
}
-
- Map<String, String> config = new HashMap(tableDescriptor.getConfiguration());
-
+
if (StringUtils.isEmpty(compactionPolicyKey) || StringUtils.isEmpty(compactionPolicyClass)) {
- config.remove(HSTORE_COMPACTION_CLASS_KEY);
- config.remove(HSTORE_ENGINE_CLASS);
- //Default blockingStoreFiles = 300
- setHbaseBlockingStoreFiles(tableDescriptor, tableName, 300);
+ // Default blockingStoreFiles = 300
+ modifyTable = setHbaseBlockingStoreFiles(tableDescriptorBuilder, tableName, 300);
} else {
- tableDescriptor.setConfiguration(compactionPolicyKey, compactionPolicyClass);
- setHbaseBlockingStoreFiles(tableDescriptor, tableName, blockingStoreFiles);
- }
-
- if (!compactionPolicyKey.equals(HSTORE_ENGINE_CLASS)) {
- tableDescriptor.removeConfiguration(HSTORE_ENGINE_CLASS);
- }
- if (!compactionPolicyKey.equals(HSTORE_COMPACTION_CLASS_KEY)) {
- tableDescriptor.removeConfiguration(HSTORE_COMPACTION_CLASS_KEY);
+ tableDescriptorBuilder.setValue(compactionPolicyKey, compactionPolicyClass);
+ tableDescriptorBuilder.removeValue(HSTORE_ENGINE_CLASS.getBytes());
+ tableDescriptorBuilder.removeValue(HSTORE_COMPACTION_CLASS_KEY.getBytes());
+ setHbaseBlockingStoreFiles(tableDescriptorBuilder, tableName, blockingStoreFiles);
+ modifyTable = true;
}
- Map<String, String> newConfig = tableDescriptor.getConfiguration();
- return !Maps.difference(config, newConfig).areEqual();
+ return modifyTable;
}
- private void setHbaseBlockingStoreFiles(HTableDescriptor tableDescriptor, String tableName, int value) {
+ private boolean setHbaseBlockingStoreFiles(TableDescriptorBuilder tableDescriptor,
+ String tableName, int value) {
int blockingStoreFiles = hbaseConf.getInt(HBASE_BLOCKING_STORE_FILES, value);
if (blockingStoreFiles != value) {
blockingStoreFiles = value;
+ tableDescriptor.setValue(BLOCKING_STORE_FILES_KEY, String.valueOf(value));
+ LOG.info("Setting config property " + BLOCKING_STORE_FILES_KEY +
+ " = " + blockingStoreFiles + " for " + tableName);
+ return true;
}
- tableDescriptor.setConfiguration(BLOCKING_STORE_FILES_KEY, String.valueOf(value));
- LOG.info("Setting config property " + BLOCKING_STORE_FILES_KEY +
- " = " + blockingStoreFiles + " for " + tableName);
+ return false;
}
protected String getSplitPointsStr(String splitPoints) {
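
Compaction tuning follows the same pattern: with
HTableDescriptor.setConfiguration/removeConfiguration gone, the store-engine
properties are written as plain descriptor values via setValue/removeValue. A
hedged sketch using the HBase property keys that the patch's constants refer
to (note that FIFO compaction generally requires a TTL on the table):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class CompactionPolicySketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
          TableName table = TableName.valueOf("METRIC_RECORD");
          TableDescriptorBuilder builder =
              TableDescriptorBuilder.newBuilder(admin.getDescriptor(table));
          // Pin the compaction policy and raise the blocking-store-files limit.
          builder.setValue("hbase.hstore.defaultengine.compactionpolicy.class",
              "org.apache.hadoop.hbase.regionserver.compactions.FIFOCompactionPolicy");
          builder.setValue("hbase.hstore.blockingStoreFiles", "1000");
          // Clear a previously pinned store engine, as the patch does.
          builder.removeValue("hbase.hstore.engine.class".getBytes());
          admin.modifyTable(builder.build());
        }
      }
    }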
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
index 395ec7b..7c6f62b 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
@@ -198,9 +198,6 @@ public class TimelineMetricConfiguration {
public static final String CLUSTER_AGGREGATOR_DAILY_DISABLED =
"timeline.metrics.cluster.aggregator.daily.disabled";
- public static final String DISABLE_APPLICATION_TIMELINE_STORE =
- "timeline.service.disable.application.timeline.store";
-
public static final String WEBAPP_HTTP_ADDRESS =
"timeline.metrics.service.webapp.address";
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixConnectionProvider.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixConnectionProvider.java
index cacbcfb..a7a20fd 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixConnectionProvider.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixConnectionProvider.java
@@ -1,5 +1,6 @@
package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query;
+import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import java.io.IOException;
@@ -27,5 +28,5 @@ public interface PhoenixConnectionProvider extends ConnectionProvider {
* @return
* @throws IOException
*/
- HBaseAdmin getHBaseAdmin() throws IOException;
+ Admin getHBaseAdmin() throws IOException;
}
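
With the interface narrowed to Admin, implementations no longer need the
concrete HBaseAdmin: any HBase 2.x Connection can supply one. An illustrative
implementation sketch (class and field names are not from the patch):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;

    public class SimpleAdminProvider {
      private final Connection hbaseConnection;

      public SimpleAdminProvider(Connection hbaseConnection) {
        this.hbaseConnection = hbaseConnection;
      }

      // Mirrors PhoenixConnectionProvider.getHBaseAdmin() after this change:
      // callers receive the Admin interface rather than a concrete class.
      public Admin getHBaseAdmin() throws IOException {
        return hbaseConnection.getAdmin();
      }
    }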
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
index 75a9d28..a1755f0 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/query/PhoenixTransactSQL.java
@@ -30,6 +30,7 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.concurrent.TimeUnit;
+import java.util.regex.Pattern;
/**
* Encapsulate all metrics related SQL queries.
@@ -424,6 +425,8 @@ public class PhoenixTransactSQL {
public static final String METRICS_CLUSTER_AGGREGATE_DAILY_TABLE_NAME =
"METRIC_AGGREGATE_DAILY";
+ public static final Pattern PHOENIX_TABLES_REGEX_PATTERN = Pattern.compile("METRIC_");
+
public static final String[] PHOENIX_TABLES = {
METRICS_RECORD_TABLE_NAME,
METRICS_AGGREGATE_MINUTE_TABLE_NAME,
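The new PHOENIX_TABLES_REGEX_PATTERN compiles the bare string "METRIC_", so it selects tables by substring search rather than full-name match. A self-contained illustration of that distinction (class and table names are invented for the demo):

    import java.util.regex.Pattern;

    public class PhoenixTablePatternDemo {
      public static void main(String[] args) {
        Pattern p = Pattern.compile("METRIC_");
        String[] tables = {"METRIC_RECORD", "METRIC_AGGREGATE_DAILY", "SYSTEM.CATALOG"};
        for (String table : tables) {
          // find() performs a substring search; matches() would require the
          // whole name to equal "METRIC_", which never happens here.
          if (p.matcher(table).find()) {
            System.out.println("AMS table: " + table);
          }
        }
      }
    }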
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java
deleted file mode 100644
index 4b202d8..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/EntityIdentifier.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-
-/**
- * The unique identifier for an entity
- */
-@Private
-@Unstable
-public class EntityIdentifier implements Comparable<EntityIdentifier> {
-
- private String id;
- private String type;
-
- public EntityIdentifier(String id, String type) {
- this.id = id;
- this.type = type;
- }
-
- /**
- * Get the entity Id.
- * @return The entity Id.
- */
- public String getId() {
- return id;
- }
-
- /**
- * Get the entity type.
- * @return The entity type.
- */
- public String getType() {
- return type;
- }
-
- @Override
- public int compareTo(EntityIdentifier other) {
- int c = type.compareTo(other.type);
- if (c != 0) return c;
- return id.compareTo(other.id);
- }
-
- @Override
- public int hashCode() {
- // generated by eclipse
- final int prime = 31;
- int result = 1;
- result = prime * result + ((id == null) ? 0 : id.hashCode());
- result = prime * result + ((type == null) ? 0 : type.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- // generated by eclipse
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- EntityIdentifier other = (EntityIdentifier) obj;
- if (id == null) {
- if (other.id != null)
- return false;
- } else if (!id.equals(other.id))
- return false;
- if (type == null) {
- if (other.type != null)
- return false;
- } else if (!type.equals(other.type))
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return "{ id: " + id + ", type: "+ type + " }";
- }
-
-}
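For reference, the deleted EntityIdentifier is a plain two-field value type whose Eclipse-generated equals/hashCode reduce to a few lines with java.util.Objects. A behavior-equivalent sketch, renamed to make clear it is illustrative rather than part of the patch:

    import java.util.Objects;

    public final class EntityId implements Comparable<EntityId> {
      private final String id;
      private final String type;

      public EntityId(String id, String type) {
        this.id = id;
        this.type = type;
      }

      @Override
      public int compareTo(EntityId other) {
        // Order by type first, then id, as in the deleted class.
        int c = type.compareTo(other.type);
        return c != 0 ? c : id.compareTo(other.id);
      }

      @Override
      public int hashCode() {
        return Objects.hash(id, type);
      }

      @Override
      public boolean equals(Object obj) {
        if (this == obj) return true;
        if (!(obj instanceof EntityId)) return false;
        EntityId other = (EntityId) obj;
        return Objects.equals(id, other.id) && Objects.equals(type, other.type);
      }
    }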
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
deleted file mode 100644
index edd4842..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
+++ /dev/null
@@ -1,1473 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeMap;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.collections.map.LRUMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.WritableComparator;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.fusesource.leveldbjni.JniDBFactory;
-import org.iq80.leveldb.DB;
-import org.iq80.leveldb.DBIterator;
-import org.iq80.leveldb.Options;
-import org.iq80.leveldb.ReadOptions;
-import org.iq80.leveldb.WriteBatch;
-import org.iq80.leveldb.WriteOptions;
-
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.readReverseOrderedLong;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong;
-
-/**
- * <p>An implementation of an application timeline store backed by leveldb.</p>
- *
- * <p>There are three sections of the db, the start time section,
- * the entity section, and the indexed entity section.</p>
- *
- * <p>The start time section is used to retrieve the unique start time for
- * a given entity. Its values each contain a start time while its keys are of
- * the form:</p>
- * <pre>
- * START_TIME_LOOKUP_PREFIX + entity type + entity id</pre>
- *
- * <p>The entity section is ordered by entity type, then entity start time
- * descending, then entity ID. There are four sub-sections of the entity
- * section: events, primary filters, related entities,
- * and other info. The event entries have event info serialized into their
- * values. The other info entries have values corresponding to the values of
- * the other info name/value map for the entry (note the names are contained
- * in the key). All other entries have empty values. The key structure is as
- * follows:</p>
- * <pre>
- * ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id
- *
- * ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- * EVENTS_COLUMN + reveventtimestamp + eventtype
- *
- * ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- * PRIMARY_FILTERS_COLUMN + name + value
- *
- * ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- * OTHER_INFO_COLUMN + name
- *
- * ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- * RELATED_ENTITIES_COLUMN + relatedentity type + relatedentity id
- *
- * ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
- * INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN + relatedentity type +
- * relatedentity id</pre>
- *
- * <p>The indexed entity section contains a primary filter name and primary
- * filter value as the prefix. Within a given name/value, entire entity
- * entries are stored in the same format as described in the entity section
- * above (below, "key" represents any one of the possible entity entry keys
- * described above).</p>
- * <pre>
- * INDEXED_ENTRY_PREFIX + primaryfilter name + primaryfilter value +
- * key</pre>
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class LeveldbTimelineStore extends AbstractService
- implements TimelineStore {
- private static final Log LOG = LogFactory
- .getLog(LeveldbTimelineStore.class);
-
- private static final String FILENAME = "leveldb-timeline-store.ldb";
-
- private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes();
- private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes();
- private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes();
-
- private static final byte[] EVENTS_COLUMN = "e".getBytes();
- private static final byte[] PRIMARY_FILTERS_COLUMN = "f".getBytes();
- private static final byte[] OTHER_INFO_COLUMN = "i".getBytes();
- private static final byte[] RELATED_ENTITIES_COLUMN = "r".getBytes();
- private static final byte[] INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN =
- "z".getBytes();
-
- private static final byte[] EMPTY_BYTES = new byte[0];
-
- private Map<EntityIdentifier, StartAndInsertTime> startTimeWriteCache;
- private Map<EntityIdentifier, Long> startTimeReadCache;
-
- /**
- * Per-entity locks are obtained when writing.
- */
- private final LockMap<EntityIdentifier> writeLocks =
- new LockMap<EntityIdentifier>();
-
- private final ReentrantReadWriteLock deleteLock =
- new ReentrantReadWriteLock();
-
- private DB db;
-
- private Thread deletionThread;
-
- public LeveldbTimelineStore() {
- super(LeveldbTimelineStore.class.getName());
- }
-
- @Override
- @SuppressWarnings("unchecked")
- protected void serviceInit(Configuration conf) throws Exception {
- Options options = new Options();
- options.createIfMissing(true);
- options.cacheSize(conf.getLong(
- YarnConfiguration.TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE,
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE));
- JniDBFactory factory = new JniDBFactory();
- String path = conf.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH);
- File p = new File(path);
- if (!p.exists()) {
- if (!p.mkdirs()) {
- throw new IOException("Couldn't create directory for leveldb " +
- "timeline store " + path);
- }
- }
- LOG.info("Using leveldb path " + path);
- db = factory.open(new File(path, FILENAME), options);
- startTimeWriteCache =
- Collections.synchronizedMap(new LRUMap(getStartTimeWriteCacheSize(
- conf)));
- startTimeReadCache =
- Collections.synchronizedMap(new LRUMap(getStartTimeReadCacheSize(
- conf)));
-
- if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, true)) {
- deletionThread = new EntityDeletionThread(conf);
- deletionThread.start();
- }
-
- super.serviceInit(conf);
- }
-
- @Override
- protected void serviceStop() throws Exception {
- if (deletionThread != null) {
- deletionThread.interrupt();
- LOG.info("Waiting for deletion thread to complete its current action");
- try {
- deletionThread.join();
- } catch (InterruptedException e) {
- LOG.warn("Interrupted while waiting for deletion thread to complete," +
- " closing db now", e);
- }
- }
- IOUtils.cleanup(LOG, db);
- super.serviceStop();
- }
-
- private static class StartAndInsertTime {
- final long startTime;
- final long insertTime;
-
- public StartAndInsertTime(long startTime, long insertTime) {
- this.startTime = startTime;
- this.insertTime = insertTime;
- }
- }
-
- private class EntityDeletionThread extends Thread {
- private final long ttl;
- private final long ttlInterval;
-
- public EntityDeletionThread(Configuration conf) {
- ttl = conf.getLong(YarnConfiguration.TIMELINE_SERVICE_TTL_MS,
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_TTL_MS);
- ttlInterval = conf.getLong(
- YarnConfiguration.TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS,
- YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_TTL_INTERVAL_MS);
- LOG.info("Starting deletion thread with ttl " + ttl + " and cycle " +
- "interval " + ttlInterval);
- }
-
- @Override
- public void run() {
- while (true) {
- long timestamp = System.currentTimeMillis() - ttl;
- try {
- discardOldEntities(timestamp);
- Thread.sleep(ttlInterval);
- } catch (IOException e) {
- LOG.error(e);
- } catch (InterruptedException e) {
- LOG.info("Deletion thread received interrupt, exiting");
- break;
- }
- }
- }
- }
-
- private static class LockMap<K> {
- private static class CountingReentrantLock<K> extends ReentrantLock {
- private static final long serialVersionUID = 1L;
- private int count;
- private K key;
-
- CountingReentrantLock(K key) {
- super();
- this.count = 0;
- this.key = key;
- }
- }
-
- private Map<K, CountingReentrantLock<K>> locks =
- new HashMap<K, CountingReentrantLock<K>>();
-
- synchronized CountingReentrantLock<K> getLock(K key) {
- CountingReentrantLock<K> lock = locks.get(key);
- if (lock == null) {
- lock = new CountingReentrantLock<K>(key);
- locks.put(key, lock);
- }
-
- lock.count++;
- return lock;
- }
-
- synchronized void returnLock(CountingReentrantLock<K> lock) {
- if (lock.count == 0) {
- throw new IllegalStateException("Returned lock more times than it " +
- "was retrieved");
- }
- lock.count--;
-
- if (lock.count == 0) {
- locks.remove(lock.key);
- }
- }
- }
-
- private static class KeyBuilder {
- private static final int MAX_NUMBER_OF_KEY_ELEMENTS = 10;
- private byte[][] b;
- private boolean[] useSeparator;
- private int index;
- private int length;
-
- public KeyBuilder(int size) {
- b = new byte[size][];
- useSeparator = new boolean[size];
- index = 0;
- length = 0;
- }
-
- public static KeyBuilder newInstance() {
- return new KeyBuilder(MAX_NUMBER_OF_KEY_ELEMENTS);
- }
-
- public KeyBuilder add(String s) {
- return add(s.getBytes(), true);
- }
-
- public KeyBuilder add(byte[] t) {
- return add(t, false);
- }
-
- public KeyBuilder add(byte[] t, boolean sep) {
- b[index] = t;
- useSeparator[index] = sep;
- length += t.length;
- if (sep) {
- length++;
- }
- index++;
- return this;
- }
-
- public byte[] getBytes() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream(length);
- for (int i = 0; i < index; i++) {
- baos.write(b[i]);
- if (i < index-1 && useSeparator[i]) {
- baos.write(0x0);
- }
- }
- return baos.toByteArray();
- }
-
- public byte[] getBytesForLookup() throws IOException {
- ByteArrayOutputStream baos = new ByteArrayOutputStream(length);
- for (int i = 0; i < index; i++) {
- baos.write(b[i]);
- if (useSeparator[i]) {
- baos.write(0x0);
- }
- }
- return baos.toByteArray();
- }
- }
-
- private static class KeyParser {
- private final byte[] b;
- private int offset;
-
- public KeyParser(byte[] b, int offset) {
- this.b = b;
- this.offset = offset;
- }
-
- public String getNextString() throws IOException {
- if (offset >= b.length) {
- throw new IOException(
- "tried to read nonexistent string from byte array");
- }
- int i = 0;
- while (offset+i < b.length && b[offset+i] != 0x0) {
- i++;
- }
- String s = new String(b, offset, i);
- offset = offset + i + 1;
- return s;
- }
-
- public long getNextLong() throws IOException {
- if (offset + 8 > b.length) {
- throw new IOException("byte array ran out when trying to read long");
- }
- long l = readReverseOrderedLong(b, offset);
- offset += 8;
- return l;
- }
-
- public int getOffset() {
- return offset;
- }
- }
-
- @Override
- public TimelineEntity getEntity(String entityId, String entityType,
- EnumSet<Field> fields) throws IOException {
- Long revStartTime = getStartTimeLong(entityId, entityType);
- if (revStartTime == null) {
- return null;
- }
- byte[] prefix = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entityType).add(writeReverseOrderedLong(revStartTime))
- .add(entityId).getBytesForLookup();
-
- DBIterator iterator = null;
- try {
- iterator = db.iterator();
- iterator.seek(prefix);
-
- return getEntity(entityId, entityType, revStartTime, fields, iterator,
- prefix, prefix.length);
- } finally {
- IOUtils.cleanup(LOG, iterator);
- }
- }
-
- /**
- * Read entity from a db iterator. If no information is found in the
- * specified fields for this entity, return null.
- */
- private static TimelineEntity getEntity(String entityId, String entityType,
- Long startTime, EnumSet<Field> fields, DBIterator iterator,
- byte[] prefix, int prefixlen) throws IOException {
- if (fields == null) {
- fields = EnumSet.allOf(Field.class);
- }
-
- TimelineEntity entity = new TimelineEntity();
- boolean events = false;
- boolean lastEvent = false;
- if (fields.contains(Field.EVENTS)) {
- events = true;
- } else if (fields.contains(Field.LAST_EVENT_ONLY)) {
- lastEvent = true;
- } else {
- entity.setEvents(null);
- }
- boolean relatedEntities = false;
- if (fields.contains(Field.RELATED_ENTITIES)) {
- relatedEntities = true;
- } else {
- entity.setRelatedEntities(null);
- }
- boolean primaryFilters = false;
- if (fields.contains(Field.PRIMARY_FILTERS)) {
- primaryFilters = true;
- } else {
- entity.setPrimaryFilters(null);
- }
- boolean otherInfo = false;
- if (fields.contains(Field.OTHER_INFO)) {
- otherInfo = true;
- } else {
- entity.setOtherInfo(null);
- }
-
- // iterate through the entity's entry, parsing information if it is part
- // of a requested field
- for (; iterator.hasNext(); iterator.next()) {
- byte[] key = iterator.peekNext().getKey();
- if (!prefixMatches(prefix, prefixlen, key)) {
- break;
- }
- if (key.length == prefixlen) {
- continue;
- }
- if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) {
- if (primaryFilters) {
- addPrimaryFilter(entity, key,
- prefixlen + PRIMARY_FILTERS_COLUMN.length);
- }
- } else if (key[prefixlen] == OTHER_INFO_COLUMN[0]) {
- if (otherInfo) {
- entity.addOtherInfo(parseRemainingKey(key,
- prefixlen + OTHER_INFO_COLUMN.length),
- GenericObjectMapper.read(iterator.peekNext().getValue()));
- }
- } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) {
- if (relatedEntities) {
- addRelatedEntity(entity, key,
- prefixlen + RELATED_ENTITIES_COLUMN.length);
- }
- } else if (key[prefixlen] == EVENTS_COLUMN[0]) {
- if (events || (lastEvent &&
- entity.getEvents().size() == 0)) {
- TimelineEvent event = getEntityEvent(null, key, prefixlen +
- EVENTS_COLUMN.length, iterator.peekNext().getValue());
- if (event != null) {
- entity.addEvent(event);
- }
- }
- } else {
- if (key[prefixlen] !=
- INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) {
- LOG.warn(String.format("Found unexpected column for entity %s of " +
- "type %s (0x%02x)", entityId, entityType, key[prefixlen]));
- }
- }
- }
-
- entity.setEntityId(entityId);
- entity.setEntityType(entityType);
- entity.setStartTime(startTime);
-
- return entity;
- }
-
- @Override
- public TimelineEvents getEntityTimelines(String entityType,
- SortedSet<String> entityIds, Long limit, Long windowStart,
- Long windowEnd, Set<String> eventType) throws IOException {
- TimelineEvents events = new TimelineEvents();
- if (entityIds == null || entityIds.isEmpty()) {
- return events;
- }
- // create a lexicographically-ordered map from start time to entities
- Map<byte[], List<EntityIdentifier>> startTimeMap = new TreeMap<byte[],
- List<EntityIdentifier>>(new Comparator<byte[]>() {
- @Override
- public int compare(byte[] o1, byte[] o2) {
- return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0,
- o2.length);
- }
- });
- DBIterator iterator = null;
- try {
- // look up start times for the specified entities
- // skip entities with no start time
- for (String entityId : entityIds) {
- byte[] startTime = getStartTime(entityId, entityType);
- if (startTime != null) {
- List<EntityIdentifier> entities = startTimeMap.get(startTime);
- if (entities == null) {
- entities = new ArrayList<EntityIdentifier>();
- startTimeMap.put(startTime, entities);
- }
- entities.add(new EntityIdentifier(entityId, entityType));
- }
- }
- for (Entry<byte[], List<EntityIdentifier>> entry :
- startTimeMap.entrySet()) {
- // look up the events matching the given parameters (limit,
- // start time, end time, event types) for entities whose start times
- // were found and add the entities to the return list
- byte[] revStartTime = entry.getKey();
- for (EntityIdentifier entityIdentifier : entry.getValue()) {
- EventsOfOneEntity entity = new EventsOfOneEntity();
- entity.setEntityId(entityIdentifier.getId());
- entity.setEntityType(entityType);
- events.addEvent(entity);
- KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entityType).add(revStartTime).add(entityIdentifier.getId())
- .add(EVENTS_COLUMN);
- byte[] prefix = kb.getBytesForLookup();
- if (windowEnd == null) {
- windowEnd = Long.MAX_VALUE;
- }
- byte[] revts = writeReverseOrderedLong(windowEnd);
- kb.add(revts);
- byte[] first = kb.getBytesForLookup();
- byte[] last = null;
- if (windowStart != null) {
- last = KeyBuilder.newInstance().add(prefix)
- .add(writeReverseOrderedLong(windowStart)).getBytesForLookup();
- }
- if (limit == null) {
- limit = DEFAULT_LIMIT;
- }
- iterator = db.iterator();
- for (iterator.seek(first); entity.getEvents().size() < limit &&
- iterator.hasNext(); iterator.next()) {
- byte[] key = iterator.peekNext().getKey();
- if (!prefixMatches(prefix, prefix.length, key) || (last != null &&
- WritableComparator.compareBytes(key, 0, key.length, last, 0,
- last.length) > 0)) {
- break;
- }
- TimelineEvent event = getEntityEvent(eventType, key, prefix.length,
- iterator.peekNext().getValue());
- if (event != null) {
- entity.addEvent(event);
- }
- }
- }
- }
- } finally {
- IOUtils.cleanup(LOG, iterator);
- }
- return events;
- }
-
- /**
- * Returns true if the byte array begins with the specified prefix.
- */
- private static boolean prefixMatches(byte[] prefix, int prefixlen,
- byte[] b) {
- if (b.length < prefixlen) {
- return false;
- }
- return WritableComparator.compareBytes(prefix, 0, prefixlen, b, 0,
- prefixlen) == 0;
- }
-
- @Override
- public TimelineEntities getEntities(String entityType,
- Long limit, Long windowStart, Long windowEnd, String fromId, Long fromTs,
- NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
- EnumSet<Field> fields) throws IOException {
- if (primaryFilter == null) {
- // if no primary filter is specified, prefix the lookup with
- // ENTITY_ENTRY_PREFIX
- return getEntityByTime(ENTITY_ENTRY_PREFIX, entityType, limit,
- windowStart, windowEnd, fromId, fromTs, secondaryFilters, fields);
- } else {
- // if a primary filter is specified, prefix the lookup with
- // INDEXED_ENTRY_PREFIX + primaryFilterName + primaryFilterValue +
- // ENTITY_ENTRY_PREFIX
- byte[] base = KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX)
- .add(primaryFilter.getName())
- .add(GenericObjectMapper.write(primaryFilter.getValue()), true)
- .add(ENTITY_ENTRY_PREFIX).getBytesForLookup();
- return getEntityByTime(base, entityType, limit, windowStart, windowEnd,
- fromId, fromTs, secondaryFilters, fields);
- }
- }
-
- /**
- * Retrieves a list of entities satisfying given parameters.
- *
- * @param base A byte array prefix for the lookup
- * @param entityType The type of the entity
- * @param limit A limit on the number of entities to return
- * @param starttime The earliest entity start time to retrieve (exclusive)
- * @param endtime The latest entity start time to retrieve (inclusive)
- * @param fromId Retrieve entities starting with this entity
- * @param fromTs Ignore entities with insert timestamp later than this ts
- * @param secondaryFilters Filter pairs that the entities should match
- * @param fields The set of fields to retrieve
- * @return A list of entities
- * @throws IOException
- */
- private TimelineEntities getEntityByTime(byte[] base,
- String entityType, Long limit, Long starttime, Long endtime,
- String fromId, Long fromTs, Collection<NameValuePair> secondaryFilters,
- EnumSet<Field> fields) throws IOException {
- DBIterator iterator = null;
- try {
- KeyBuilder kb = KeyBuilder.newInstance().add(base).add(entityType);
- // only db keys matching the prefix (base + entity type) will be parsed
- byte[] prefix = kb.getBytesForLookup();
- if (endtime == null) {
- // if end time is null, place no restriction on end time
- endtime = Long.MAX_VALUE;
- }
- // construct a first key that will be seeked to using end time or fromId
- byte[] first = null;
- if (fromId != null) {
- Long fromIdStartTime = getStartTimeLong(fromId, entityType);
- if (fromIdStartTime == null) {
- // no start time for provided id, so return empty entities
- return new TimelineEntities();
- }
- if (fromIdStartTime <= endtime) {
- // if provided id's start time falls before the end of the window,
- // use it to construct the seek key
- first = kb.add(writeReverseOrderedLong(fromIdStartTime))
- .add(fromId).getBytesForLookup();
- }
- }
- // if seek key wasn't constructed using fromId, construct it using end ts
- if (first == null) {
- first = kb.add(writeReverseOrderedLong(endtime)).getBytesForLookup();
- }
- byte[] last = null;
- if (starttime != null) {
- // if start time is not null, set a last key that will not be
- // iterated past
- last = KeyBuilder.newInstance().add(base).add(entityType)
- .add(writeReverseOrderedLong(starttime)).getBytesForLookup();
- }
- if (limit == null) {
- // if limit is not specified, use the default
- limit = DEFAULT_LIMIT;
- }
-
- TimelineEntities entities = new TimelineEntities();
- iterator = db.iterator();
- iterator.seek(first);
- // iterate until one of the following conditions is met: limit is
- // reached, there are no more keys, the key prefix no longer matches,
- // or a start time has been specified and reached/exceeded
- while (entities.getEntities().size() < limit && iterator.hasNext()) {
- byte[] key = iterator.peekNext().getKey();
- if (!prefixMatches(prefix, prefix.length, key) || (last != null &&
- WritableComparator.compareBytes(key, 0, key.length, last, 0,
- last.length) > 0)) {
- break;
- }
- // read the start time and entity id from the current key
- KeyParser kp = new KeyParser(key, prefix.length);
- Long startTime = kp.getNextLong();
- String entityId = kp.getNextString();
-
- if (fromTs != null) {
- long insertTime = readReverseOrderedLong(iterator.peekNext()
- .getValue(), 0);
- if (insertTime > fromTs) {
- byte[] firstKey = key;
- while (iterator.hasNext() && prefixMatches(firstKey,
- kp.getOffset(), key)) {
- iterator.next();
- key = iterator.peekNext().getKey();
- }
- continue;
- }
- }
-
- // parse the entity that owns this key, iterating over all keys for
- // the entity
- TimelineEntity entity = getEntity(entityId, entityType, startTime,
- fields, iterator, key, kp.getOffset());
- // determine if the retrieved entity matches the provided secondary
- // filters, and if so add it to the list of entities to return
- boolean filterPassed = true;
- if (secondaryFilters != null) {
- for (NameValuePair filter : secondaryFilters) {
- Object v = entity.getOtherInfo().get(filter.getName());
- if (v == null) {
- Set<Object> vs = entity.getPrimaryFilters()
- .get(filter.getName());
- if (vs != null && !vs.contains(filter.getValue())) {
- filterPassed = false;
- break;
- }
- } else if (!v.equals(filter.getValue())) {
- filterPassed = false;
- break;
- }
- }
- }
- if (filterPassed) {
- entities.addEntity(entity);
- }
- }
- return entities;
- } finally {
- IOUtils.cleanup(LOG, iterator);
- }
- }
-
- /**
- * Put a single entity. If there is an error, add a TimelinePutError to the
- * given response.
- */
- private void put(TimelineEntity entity, TimelinePutResponse response) {
- LockMap.CountingReentrantLock<EntityIdentifier> lock =
- writeLocks.getLock(new EntityIdentifier(entity.getEntityId(),
- entity.getEntityType()));
- lock.lock();
- WriteBatch writeBatch = null;
- List<EntityIdentifier> relatedEntitiesWithoutStartTimes =
- new ArrayList<EntityIdentifier>();
- byte[] revStartTime = null;
- try {
- writeBatch = db.createWriteBatch();
- List<TimelineEvent> events = entity.getEvents();
- // look up the start time for the entity
- StartAndInsertTime startAndInsertTime = getAndSetStartTime(
- entity.getEntityId(), entity.getEntityType(),
- entity.getStartTime(), events);
- if (startAndInsertTime == null) {
- // if no start time is found, add an error and return
- TimelinePutError error = new TimelinePutError();
- error.setEntityId(entity.getEntityId());
- error.setEntityType(entity.getEntityType());
- error.setErrorCode(TimelinePutError.NO_START_TIME);
- response.addError(error);
- return;
- }
- revStartTime = writeReverseOrderedLong(startAndInsertTime
- .startTime);
-
- Map<String, Set<Object>> primaryFilters = entity.getPrimaryFilters();
-
- // write entity marker
- byte[] markerKey = createEntityMarkerKey(entity.getEntityId(),
- entity.getEntityType(), revStartTime);
- byte[] markerValue = writeReverseOrderedLong(startAndInsertTime
- .insertTime);
- writeBatch.put(markerKey, markerValue);
- writePrimaryFilterEntries(writeBatch, primaryFilters, markerKey,
- markerValue);
-
- // write event entries
- if (events != null && !events.isEmpty()) {
- for (TimelineEvent event : events) {
- byte[] revts = writeReverseOrderedLong(event.getTimestamp());
- byte[] key = createEntityEventKey(entity.getEntityId(),
- entity.getEntityType(), revStartTime, revts,
- event.getEventType());
- byte[] value = GenericObjectMapper.write(event.getEventInfo());
- writeBatch.put(key, value);
- writePrimaryFilterEntries(writeBatch, primaryFilters, key, value);
- }
- }
-
- // write related entity entries
- Map<String, Set<String>> relatedEntities =
- entity.getRelatedEntities();
- if (relatedEntities != null && !relatedEntities.isEmpty()) {
- for (Entry<String, Set<String>> relatedEntityList :
- relatedEntities.entrySet()) {
- String relatedEntityType = relatedEntityList.getKey();
- for (String relatedEntityId : relatedEntityList.getValue()) {
- // invisible "reverse" entries (entity -> related entity)
- byte[] key = createReverseRelatedEntityKey(entity.getEntityId(),
- entity.getEntityType(), revStartTime, relatedEntityId,
- relatedEntityType);
- writeBatch.put(key, EMPTY_BYTES);
- // look up start time of related entity
- byte[] relatedEntityStartTime = getStartTime(relatedEntityId,
- relatedEntityType);
- // delay writing the related entity if no start time is found
- if (relatedEntityStartTime == null) {
- relatedEntitiesWithoutStartTimes.add(
- new EntityIdentifier(relatedEntityId, relatedEntityType));
- continue;
- }
- // write "forward" entry (related entity -> entity)
- key = createRelatedEntityKey(relatedEntityId,
- relatedEntityType, relatedEntityStartTime,
- entity.getEntityId(), entity.getEntityType());
- writeBatch.put(key, EMPTY_BYTES);
- }
- }
- }
-
- // write primary filter entries
- if (primaryFilters != null && !primaryFilters.isEmpty()) {
- for (Entry<String, Set<Object>> primaryFilter :
- primaryFilters.entrySet()) {
- for (Object primaryFilterValue : primaryFilter.getValue()) {
- byte[] key = createPrimaryFilterKey(entity.getEntityId(),
- entity.getEntityType(), revStartTime,
- primaryFilter.getKey(), primaryFilterValue);
- writeBatch.put(key, EMPTY_BYTES);
- writePrimaryFilterEntries(writeBatch, primaryFilters, key,
- EMPTY_BYTES);
- }
- }
- }
-
- // write other info entries
- Map<String, Object> otherInfo = entity.getOtherInfo();
- if (otherInfo != null && !otherInfo.isEmpty()) {
- for (Entry<String, Object> i : otherInfo.entrySet()) {
- byte[] key = createOtherInfoKey(entity.getEntityId(),
- entity.getEntityType(), revStartTime, i.getKey());
- byte[] value = GenericObjectMapper.write(i.getValue());
- writeBatch.put(key, value);
- writePrimaryFilterEntries(writeBatch, primaryFilters, key, value);
- }
- }
- db.write(writeBatch);
- } catch (IOException e) {
- LOG.error("Error putting entity " + entity.getEntityId() +
- " of type " + entity.getEntityType(), e);
- TimelinePutError error = new TimelinePutError();
- error.setEntityId(entity.getEntityId());
- error.setEntityType(entity.getEntityType());
- error.setErrorCode(TimelinePutError.IO_EXCEPTION);
- response.addError(error);
- } finally {
- lock.unlock();
- writeLocks.returnLock(lock);
- IOUtils.cleanup(LOG, writeBatch);
- }
-
- for (EntityIdentifier relatedEntity : relatedEntitiesWithoutStartTimes) {
- lock = writeLocks.getLock(relatedEntity);
- lock.lock();
- try {
- StartAndInsertTime relatedEntityStartAndInsertTime =
- getAndSetStartTime(relatedEntity.getId(), relatedEntity.getType(),
- readReverseOrderedLong(revStartTime, 0), null);
- if (relatedEntityStartAndInsertTime == null) {
- throw new IOException("Error setting start time for related entity");
- }
- byte[] relatedEntityStartTime = writeReverseOrderedLong(
- relatedEntityStartAndInsertTime.startTime);
- db.put(createRelatedEntityKey(relatedEntity.getId(),
- relatedEntity.getType(), relatedEntityStartTime,
- entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES);
- db.put(createEntityMarkerKey(relatedEntity.getId(),
- relatedEntity.getType(), relatedEntityStartTime),
- writeReverseOrderedLong(relatedEntityStartAndInsertTime
- .insertTime));
- } catch (IOException e) {
- LOG.error("Error putting related entity " + relatedEntity.getId() +
- " of type " + relatedEntity.getType() + " for entity " +
- entity.getEntityId() + " of type " + entity.getEntityType(), e);
- TimelinePutError error = new TimelinePutError();
- error.setEntityId(entity.getEntityId());
- error.setEntityType(entity.getEntityType());
- error.setErrorCode(TimelinePutError.IO_EXCEPTION);
- response.addError(error);
- } finally {
- lock.unlock();
- writeLocks.returnLock(lock);
- }
- }
- }
-
- /**
- * For a given key / value pair that has been written to the db,
- * write additional entries to the db for each primary filter.
- */
- private static void writePrimaryFilterEntries(WriteBatch writeBatch,
- Map<String, Set<Object>> primaryFilters, byte[] key, byte[] value)
- throws IOException {
- if (primaryFilters != null && !primaryFilters.isEmpty()) {
- for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) {
- for (Object pfval : pf.getValue()) {
- writeBatch.put(addPrimaryFilterToKey(pf.getKey(), pfval,
- key), value);
- }
- }
- }
- }
-
- @Override
- public TimelinePutResponse put(TimelineEntities entities) {
- try {
- deleteLock.readLock().lock();
- TimelinePutResponse response = new TimelinePutResponse();
- for (TimelineEntity entity : entities.getEntities()) {
- put(entity, response);
- }
- return response;
- } finally {
- deleteLock.readLock().unlock();
- }
- }
-
- /**
- * Get the unique start time for a given entity as a byte array that sorts
- * the timestamps in reverse order (see {@link
- * GenericObjectMapper#writeReverseOrderedLong(long)}).
- *
- * @param entityId The id of the entity
- * @param entityType The type of the entity
- * @return A byte array, null if not found
- * @throws IOException
- */
- private byte[] getStartTime(String entityId, String entityType)
- throws IOException {
- Long l = getStartTimeLong(entityId, entityType);
- return l == null ? null : writeReverseOrderedLong(l);
- }
-
- /**
- * Get the unique start time for a given entity as a Long.
- *
- * @param entityId The id of the entity
- * @param entityType The type of the entity
- * @return A Long, null if not found
- * @throws IOException
- */
- private Long getStartTimeLong(String entityId, String entityType)
- throws IOException {
- EntityIdentifier entity = new EntityIdentifier(entityId, entityType);
- // start time is not provided, so try to look it up
- if (startTimeReadCache.containsKey(entity)) {
- // found the start time in the cache
- return startTimeReadCache.get(entity);
- } else {
- // try to look up the start time in the db
- byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType());
- byte[] v = db.get(b);
- if (v == null) {
- // did not find the start time in the db
- return null;
- } else {
- // found the start time in the db
- Long l = readReverseOrderedLong(v, 0);
- startTimeReadCache.put(entity, l);
- return l;
- }
- }
- }
-
- /**
- * Get the unique start time for a given entity as a byte array that sorts
- * the timestamps in reverse order (see {@link
- * GenericObjectMapper#writeReverseOrderedLong(long)}). If the start time
- * doesn't exist, set it based on the information provided. Should only be
- * called when a lock has been obtained on the entity.
- *
- * @param entityId The id of the entity
- * @param entityType The type of the entity
- * @param startTime The start time of the entity, or null
- * @param events A list of events for the entity, or null
- * @return A StartAndInsertTime
- * @throws IOException
- */
- private StartAndInsertTime getAndSetStartTime(String entityId,
- String entityType, Long startTime, List<TimelineEvent> events)
- throws IOException {
- EntityIdentifier entity = new EntityIdentifier(entityId, entityType);
- if (startTime == null) {
- // start time is not provided, so try to look it up
- if (startTimeWriteCache.containsKey(entity)) {
- // found the start time in the cache
- return startTimeWriteCache.get(entity);
- } else {
- if (events != null) {
- // prepare a start time from events in case it is needed
- Long min = Long.MAX_VALUE;
- for (TimelineEvent e : events) {
- if (min > e.getTimestamp()) {
- min = e.getTimestamp();
- }
- }
- startTime = min;
- }
- return checkStartTimeInDb(entity, startTime);
- }
- } else {
- // start time is provided
- if (startTimeWriteCache.containsKey(entity)) {
- // always use start time from cache if it exists
- return startTimeWriteCache.get(entity);
- } else {
- // check the provided start time matches the db
- return checkStartTimeInDb(entity, startTime);
- }
- }
- }
-
- /**
- * Checks db for start time and returns it if it exists. If it doesn't
- * exist, writes the suggested start time (if it is not null). This is
- * only called when the start time is not found in the cache,
- * so it adds it back into the cache if it is found. Should only be called
- * when a lock has been obtained on the entity.
- */
- private StartAndInsertTime checkStartTimeInDb(EntityIdentifier entity,
- Long suggestedStartTime) throws IOException {
- StartAndInsertTime startAndInsertTime = null;
- // create lookup key for start time
- byte[] b = createStartTimeLookupKey(entity.getId(), entity.getType());
- // retrieve value for key
- byte[] v = db.get(b);
- if (v == null) {
- // start time doesn't exist in db
- if (suggestedStartTime == null) {
- return null;
- }
- startAndInsertTime = new StartAndInsertTime(suggestedStartTime,
- System.currentTimeMillis());
-
- // write suggested start time
- v = new byte[16];
- writeReverseOrderedLong(suggestedStartTime, v, 0);
- writeReverseOrderedLong(startAndInsertTime.insertTime, v, 8);
- WriteOptions writeOptions = new WriteOptions();
- writeOptions.sync(true);
- db.put(b, v, writeOptions);
- } else {
- // found start time in db, so ignore suggested start time
- startAndInsertTime = new StartAndInsertTime(readReverseOrderedLong(v, 0),
- readReverseOrderedLong(v, 8));
- }
- startTimeWriteCache.put(entity, startAndInsertTime);
- startTimeReadCache.put(entity, startAndInsertTime.startTime);
- return startAndInsertTime;
- }
-
- /**
- * Creates a key for looking up the start time of a given entity,
- * of the form START_TIME_LOOKUP_PREFIX + entity type + entity id.
- */
- private static byte[] createStartTimeLookupKey(String entityId,
- String entityType) throws IOException {
- return KeyBuilder.newInstance().add(START_TIME_LOOKUP_PREFIX)
- .add(entityType).add(entityId).getBytes();
- }
-
- /**
- * Creates an entity marker, serializing ENTITY_ENTRY_PREFIX + entity type +
- * revstarttime + entity id.
- */
- private static byte[] createEntityMarkerKey(String entityId,
- String entityType, byte[] revStartTime) throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entityType).add(revStartTime).add(entityId).getBytesForLookup();
- }
-
- /**
- * Creates an index entry for the given key of the form
- * INDEXED_ENTRY_PREFIX + primaryfiltername + primaryfiltervalue + key.
- */
- private static byte[] addPrimaryFilterToKey(String primaryFilterName,
- Object primaryFilterValue, byte[] key) throws IOException {
- return KeyBuilder.newInstance().add(INDEXED_ENTRY_PREFIX)
- .add(primaryFilterName)
- .add(GenericObjectMapper.write(primaryFilterValue), true).add(key)
- .getBytes();
- }
-
- /**
- * Creates an event key, serializing ENTITY_ENTRY_PREFIX + entity type +
- * revstarttime + entity id + EVENTS_COLUMN + reveventtimestamp + event type.
- */
- private static byte[] createEntityEventKey(String entityId,
- String entityType, byte[] revStartTime, byte[] revEventTimestamp,
- String eventType) throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entityType).add(revStartTime).add(entityId).add(EVENTS_COLUMN)
- .add(revEventTimestamp).add(eventType).getBytes();
- }
-
- /**
- * Creates an event object from the given key, offset, and value. If the
- * event type is not contained in the specified set of event types,
- * returns null.
- */
- private static TimelineEvent getEntityEvent(Set<String> eventTypes,
- byte[] key, int offset, byte[] value) throws IOException {
- KeyParser kp = new KeyParser(key, offset);
- long ts = kp.getNextLong();
- String tstype = kp.getNextString();
- if (eventTypes == null || eventTypes.contains(tstype)) {
- TimelineEvent event = new TimelineEvent();
- event.setTimestamp(ts);
- event.setEventType(tstype);
- Object o = GenericObjectMapper.read(value);
- if (o == null) {
- event.setEventInfo(null);
- } else if (o instanceof Map) {
- @SuppressWarnings("unchecked")
- Map<String, Object> m = (Map<String, Object>) o;
- event.setEventInfo(m);
- } else {
- throw new IOException("Couldn't deserialize event info map");
- }
- return event;
- }
- return null;
- }
-
- /**
- * Creates a primary filter key, serializing ENTITY_ENTRY_PREFIX +
- * entity type + revstarttime + entity id + PRIMARY_FILTERS_COLUMN + name +
- * value.
- */
- private static byte[] createPrimaryFilterKey(String entityId,
- String entityType, byte[] revStartTime, String name, Object value)
- throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType)
- .add(revStartTime).add(entityId).add(PRIMARY_FILTERS_COLUMN).add(name)
- .add(GenericObjectMapper.write(value)).getBytes();
- }
-
- /**
- * Parses the primary filter from the given key at the given offset and
- * adds it to the given entity.
- */
- private static void addPrimaryFilter(TimelineEntity entity, byte[] key,
- int offset) throws IOException {
- KeyParser kp = new KeyParser(key, offset);
- String name = kp.getNextString();
- Object value = GenericObjectMapper.read(key, kp.getOffset());
- entity.addPrimaryFilter(name, value);
- }
-
- /**
- * Creates an other info key, serializing ENTITY_ENTRY_PREFIX + entity type +
- * revstarttime + entity id + OTHER_INFO_COLUMN + name.
- */
- private static byte[] createOtherInfoKey(String entityId, String entityType,
- byte[] revStartTime, String name) throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType)
- .add(revStartTime).add(entityId).add(OTHER_INFO_COLUMN).add(name)
- .getBytes();
- }
-
- /**
- * Creates a string representation of the byte array from the given offset
- * to the end of the array (for parsing other info keys).
- */
- private static String parseRemainingKey(byte[] b, int offset) {
- return new String(b, offset, b.length - offset);
- }
-
- /**
- * Creates a related entity key, serializing ENTITY_ENTRY_PREFIX +
- * entity type + revstarttime + entity id + RELATED_ENTITIES_COLUMN +
- * relatedentity type + relatedentity id.
- */
- private static byte[] createRelatedEntityKey(String entityId,
- String entityType, byte[] revStartTime, String relatedEntityId,
- String relatedEntityType) throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType)
- .add(revStartTime).add(entityId).add(RELATED_ENTITIES_COLUMN)
- .add(relatedEntityType).add(relatedEntityId).getBytes();
- }
-
- /**
- * Parses the related entity from the given key at the given offset and
- * adds it to the given entity.
- */
- private static void addRelatedEntity(TimelineEntity entity, byte[] key,
- int offset) throws IOException {
- KeyParser kp = new KeyParser(key, offset);
- String type = kp.getNextString();
- String id = kp.getNextString();
- entity.addRelatedEntity(type, id);
- }
-
- /**
- * Creates a reverse related entity key, serializing ENTITY_ENTRY_PREFIX +
- * entity type + revstarttime + entity id +
- * INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN +
- * relatedentity type + relatedentity id.
- */
- private static byte[] createReverseRelatedEntityKey(String entityId,
- String entityType, byte[] revStartTime, String relatedEntityId,
- String relatedEntityType) throws IOException {
- return KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX).add(entityType)
- .add(revStartTime).add(entityId)
- .add(INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN)
- .add(relatedEntityType).add(relatedEntityId).getBytes();
- }
-
- /**
- * Clears the cache to test reloading start times from leveldb (only for
- * testing).
- */
- @VisibleForTesting
- void clearStartTimeCache() {
- startTimeWriteCache.clear();
- startTimeReadCache.clear();
- }
-
- @VisibleForTesting
- static int getStartTimeReadCacheSize(Configuration conf) {
- return conf.getInt(
- YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE,
- YarnConfiguration.
- DEFAULT_TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE);
- }
-
- @VisibleForTesting
- static int getStartTimeWriteCacheSize(Configuration conf) {
- return conf.getInt(
- YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE,
- YarnConfiguration.
- DEFAULT_TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE);
- }
-
- // warning is suppressed to prevent eclipse from noting unclosed resource
- @SuppressWarnings("resource")
- @VisibleForTesting
- List<String> getEntityTypes() throws IOException {
- DBIterator iterator = null;
- try {
- iterator = getDbIterator(false);
- List<String> entityTypes = new ArrayList<String>();
- iterator.seek(ENTITY_ENTRY_PREFIX);
- while (iterator.hasNext()) {
- byte[] key = iterator.peekNext().getKey();
- if (key[0] != ENTITY_ENTRY_PREFIX[0]) {
- break;
- }
- KeyParser kp = new KeyParser(key,
- ENTITY_ENTRY_PREFIX.length);
- String entityType = kp.getNextString();
- entityTypes.add(entityType);
- byte[] lookupKey = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entityType).getBytesForLookup();
- if (lookupKey[lookupKey.length - 1] != 0x0) {
- throw new IOException("Found unexpected end byte in lookup key");
- }
- lookupKey[lookupKey.length - 1] = 0x1;
- iterator.seek(lookupKey);
- }
- return entityTypes;
- } finally {
- IOUtils.cleanup(LOG, iterator);
- }
- }
-
- /**
- * Finds all keys in the db that have a given prefix and deletes them on
- * the given write batch.
- */
- private void deleteKeysWithPrefix(WriteBatch writeBatch, byte[] prefix,
- DBIterator iterator) {
- for (iterator.seek(prefix); iterator.hasNext(); iterator.next()) {
- byte[] key = iterator.peekNext().getKey();
- if (!prefixMatches(prefix, prefix.length, key)) {
- break;
- }
- writeBatch.delete(key);
- }
- }
-
- @VisibleForTesting
- boolean deleteNextEntity(String entityType, byte[] reverseTimestamp,
- DBIterator iterator, DBIterator pfIterator, boolean seeked)
- throws IOException {
- WriteBatch writeBatch = null;
- try {
- KeyBuilder kb = KeyBuilder.newInstance().add(ENTITY_ENTRY_PREFIX)
- .add(entityType);
- byte[] typePrefix = kb.getBytesForLookup();
- kb.add(reverseTimestamp);
- if (!seeked) {
- iterator.seek(kb.getBytesForLookup());
- }
- if (!iterator.hasNext()) {
- return false;
- }
- byte[] entityKey = iterator.peekNext().getKey();
- if (!prefixMatches(typePrefix, typePrefix.length, entityKey)) {
- return false;
- }
-
- // read the start time and entity id from the current key
- KeyParser kp = new KeyParser(entityKey, typePrefix.length + 8);
- String entityId = kp.getNextString();
- int prefixlen = kp.getOffset();
- byte[] deletePrefix = new byte[prefixlen];
- System.arraycopy(entityKey, 0, deletePrefix, 0, prefixlen);
-
- writeBatch = db.createWriteBatch();
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Deleting entity type:" + entityType + " id:" + entityId);
- }
- // remove start time from cache and db
- writeBatch.delete(createStartTimeLookupKey(entityId, entityType));
- EntityIdentifier entityIdentifier =
- new EntityIdentifier(entityId, entityType);
- startTimeReadCache.remove(entityIdentifier);
- startTimeWriteCache.remove(entityIdentifier);
-
- // delete current entity
- for (; iterator.hasNext(); iterator.next()) {
- byte[] key = iterator.peekNext().getKey();
- if (!prefixMatches(entityKey, prefixlen, key)) {
- break;
- }
- writeBatch.delete(key);
-
- if (key.length == prefixlen) {
- continue;
- }
- if (key[prefixlen] == PRIMARY_FILTERS_COLUMN[0]) {
- kp = new KeyParser(key,
- prefixlen + PRIMARY_FILTERS_COLUMN.length);
- String name = kp.getNextString();
- Object value = GenericObjectMapper.read(key, kp.getOffset());
- deleteKeysWithPrefix(writeBatch, addPrimaryFilterToKey(name, value,
- deletePrefix), pfIterator);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Deleting entity type:" + entityType + " id:" +
- entityId + " primary filter entry " + name + " " +
- value);
- }
- } else if (key[prefixlen] == RELATED_ENTITIES_COLUMN[0]) {
- kp = new KeyParser(key,
- prefixlen + RELATED_ENTITIES_COLUMN.length);
- String type = kp.getNextString();
- String id = kp.getNextString();
- byte[] relatedEntityStartTime = getStartTime(id, type);
- if (relatedEntityStartTime == null) {
- LOG.warn("Found no start time for " +
- "related entity " + id + " of type " + type + " while " +
- "deleting " + entityId + " of type " + entityType);
- continue;
- }
- writeBatch.delete(createReverseRelatedEntityKey(id, type,
- relatedEntityStartTime, entityId, entityType));
- if (LOG.isDebugEnabled()) {
- LOG.debug("Deleting entity type:" + entityType + " id:" +
- entityId + " from invisible reverse related entity " +
- "entry of type:" + type + " id:" + id);
- }
- } else if (key[prefixlen] ==
- INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN[0]) {
- kp = new KeyParser(key, prefixlen +
- INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN.length);
- String type = kp.getNextString();
- String id = kp.getNextString();
- byte[] relatedEntityStartTime = getStartTime(id, type);
- if (relatedEntityStartTime == null) {
- LOG.warn("Found no start time for reverse " +
- "related entity " + id + " of type " + type + " while " +
- "deleting " + entityId + " of type " + entityType);
- continue;
- }
- writeBatch.delete(createRelatedEntityKey(id, type,
- relatedEntityStartTime, entityId, entityType));
- if (LOG.isDebugEnabled()) {
- LOG.debug("Deleting entity type:" + entityType + " id:" +
- entityId + " from related entity entry of type:" +
- type + " id:" + id);
- }
- }
- }
- WriteOptions writeOptions = new WriteOptions();
- writeOptions.sync(true);
- db.write(writeBatch, writeOptions);
- return true;
- } finally {
- IOUtils.cleanup(LOG, writeBatch);
- }
- }
-
- /**
- * Discards entities with start timestamp less than or equal to the given
- * timestamp.
- */
- @VisibleForTesting
- void discardOldEntities(long timestamp)
- throws IOException, InterruptedException {
- byte[] reverseTimestamp = writeReverseOrderedLong(timestamp);
- long totalCount = 0;
- long t1 = System.currentTimeMillis();
- try {
- List<String> entityTypes = getEntityTypes();
- for (String entityType : entityTypes) {
- DBIterator iterator = null;
- DBIterator pfIterator = null;
- long typeCount = 0;
- try {
- deleteLock.writeLock().lock();
- iterator = getDbIterator(false);
- pfIterator = getDbIterator(false);
-
- if (deletionThread != null && deletionThread.isInterrupted()) {
- throw new InterruptedException();
- }
- boolean seeked = false;
- while (deleteNextEntity(entityType, reverseTimestamp, iterator,
- pfIterator, seeked)) {
- typeCount++;
- totalCount++;
- seeked = true;
- if (deletionThread != null && deletionThread.isInterrupted()) {
- throw new InterruptedException();
- }
- }
- } catch (IOException e) {
- LOG.error("Got IOException while deleting entities for type " +
- entityType + ", continuing to next type", e);
- } finally {
- IOUtils.cleanup(LOG, iterator, pfIterator);
- deleteLock.writeLock().unlock();
- if (typeCount > 0) {
- LOG.info("Deleted " + typeCount + " entities of type " +
- entityType);
- }
- }
- }
- } finally {
- long t2 = System.currentTimeMillis();
- LOG.info("Discarded " + totalCount + " entities for timestamp " +
- timestamp + " and earlier in " + (t2 - t1) / 1000.0 + " seconds");
- }
- }
-
- @VisibleForTesting
- DBIterator getDbIterator(boolean fillCache) {
- ReadOptions readOptions = new ReadOptions();
- readOptions.fillCache(fillCache);
- return db.iterator(readOptions);
- }
-}
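The javadoc at the head of the deleted LeveldbTimelineStore documents its key schema: a one-byte section prefix followed by string components joined with a 0x0 separator. A standalone sketch of the start-time lookup key described there ("k" + entity type + 0x0 + entity id); names and sample values are illustrative:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    public class StartTimeKeyDemo {
      // START_TIME_LOOKUP_PREFIX ("k") + entity type + 0x0 + entity id,
      // mirroring createStartTimeLookupKey() in the deleted store. Note the
      // trailing component gets no separator, matching KeyBuilder.getBytes().
      public static byte[] startTimeKey(String entityType, String entityId)
          throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write("k".getBytes());
        out.write(entityType.getBytes());
        out.write(0x0); // separator between string components
        out.write(entityId.getBytes());
        return out.toByteArray();
      }

      public static void main(String[] args) throws IOException {
        byte[] key = startTimeKey("YARN_APPLICATION", "application_0001");
        System.out.println(key.length + " bytes"); // 1 + 16 + 1 + 16 = 34 bytes
      }
    }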
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java
deleted file mode 100644
index 86ac1f8..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java
+++ /dev/null
@@ -1,360 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.PriorityQueue;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
-
-/**
- * In-memory implementation of {@link TimelineStore}. This
- * implementation is for test purposes only. If users instantiate it
- * improperly, they may end up reading and writing history data against
- * different in-memory stores.
- *
- */
-@Private
-@Unstable
-public class MemoryTimelineStore
- extends AbstractService implements TimelineStore {
-
- private Map<EntityIdentifier, TimelineEntity> entities =
- new HashMap<EntityIdentifier, TimelineEntity>();
- private Map<EntityIdentifier, Long> entityInsertTimes =
- new HashMap<EntityIdentifier, Long>();
-
- public MemoryTimelineStore() {
- super(MemoryTimelineStore.class.getName());
- }
-
- @Override
- public TimelineEntities getEntities(String entityType, Long limit,
- Long windowStart, Long windowEnd, String fromId, Long fromTs,
- NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
- EnumSet<Field> fields) {
- if (limit == null) {
- limit = DEFAULT_LIMIT;
- }
- if (windowStart == null) {
- windowStart = Long.MIN_VALUE;
- }
- if (windowEnd == null) {
- windowEnd = Long.MAX_VALUE;
- }
- if (fields == null) {
- fields = EnumSet.allOf(Field.class);
- }
-
- Iterator<TimelineEntity> entityIterator = null;
- if (fromId != null) {
- TimelineEntity firstEntity = entities.get(new EntityIdentifier(fromId,
- entityType));
- if (firstEntity == null) {
- return new TimelineEntities();
- } else {
- entityIterator = new TreeSet<TimelineEntity>(entities.values())
- .tailSet(firstEntity, true).iterator();
- }
- }
- if (entityIterator == null) {
- entityIterator = new PriorityQueue<TimelineEntity>(entities.values())
- .iterator();
- }
-
- List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
- while (entityIterator.hasNext()) {
- TimelineEntity entity = entityIterator.next();
- if (entitiesSelected.size() >= limit) {
- break;
- }
- if (!entity.getEntityType().equals(entityType)) {
- continue;
- }
- if (entity.getStartTime() <= windowStart) {
- continue;
- }
- if (entity.getStartTime() > windowEnd) {
- continue;
- }
- if (fromTs != null && entityInsertTimes.get(new EntityIdentifier(
- entity.getEntityId(), entity.getEntityType())) > fromTs) {
- continue;
- }
- if (primaryFilter != null &&
- !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
- continue;
- }
- if (secondaryFilters != null) { // AND logic
- boolean flag = true;
- for (NameValuePair secondaryFilter : secondaryFilters) {
- if (secondaryFilter != null && !matchPrimaryFilter(
- entity.getPrimaryFilters(), secondaryFilter) &&
- !matchFilter(entity.getOtherInfo(), secondaryFilter)) {
- flag = false;
- break;
- }
- }
- if (!flag) {
- continue;
- }
- }
- entitiesSelected.add(entity);
- }
- List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
- for (TimelineEntity entitySelected : entitiesSelected) {
- entitiesToReturn.add(maskFields(entitySelected, fields));
- }
- Collections.sort(entitiesToReturn);
- TimelineEntities entitiesWrapper = new TimelineEntities();
- entitiesWrapper.setEntities(entitiesToReturn);
- return entitiesWrapper;
- }
-
- @Override
- public TimelineEntity getEntity(String entityId, String entityType,
- EnumSet<Field> fieldsToRetrieve) {
- if (fieldsToRetrieve == null) {
- fieldsToRetrieve = EnumSet.allOf(Field.class);
- }
- TimelineEntity entity = entities.get(new EntityIdentifier(entityId, entityType));
- if (entity == null) {
- return null;
- } else {
- return maskFields(entity, fieldsToRetrieve);
- }
- }
-
- @Override
- public TimelineEvents getEntityTimelines(String entityType,
- SortedSet<String> entityIds, Long limit, Long windowStart,
- Long windowEnd,
- Set<String> eventTypes) {
- TimelineEvents allEvents = new TimelineEvents();
- if (entityIds == null) {
- return allEvents;
- }
- if (limit == null) {
- limit = DEFAULT_LIMIT;
- }
- if (windowStart == null) {
- windowStart = Long.MIN_VALUE;
- }
- if (windowEnd == null) {
- windowEnd = Long.MAX_VALUE;
- }
- for (String entityId : entityIds) {
- EntityIdentifier entityID = new EntityIdentifier(entityId, entityType);
- TimelineEntity entity = entities.get(entityID);
- if (entity == null) {
- continue;
- }
- EventsOfOneEntity events = new EventsOfOneEntity();
- events.setEntityId(entityId);
- events.setEntityType(entityType);
- for (TimelineEvent event : entity.getEvents()) {
- if (events.getEvents().size() >= limit) {
- break;
- }
- if (event.getTimestamp() <= windowStart) {
- continue;
- }
- if (event.getTimestamp() > windowEnd) {
- continue;
- }
- if (eventTypes != null && !eventTypes.contains(event.getEventType())) {
- continue;
- }
- events.addEvent(event);
- }
- allEvents.addEvent(events);
- }
- return allEvents;
- }
-
- @Override
- public TimelinePutResponse put(TimelineEntities data) {
- TimelinePutResponse response = new TimelinePutResponse();
- for (TimelineEntity entity : data.getEntities()) {
- EntityIdentifier entityId =
- new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
- // store entity info in memory
- TimelineEntity existingEntity = entities.get(entityId);
- if (existingEntity == null) {
- existingEntity = new TimelineEntity();
- existingEntity.setEntityId(entity.getEntityId());
- existingEntity.setEntityType(entity.getEntityType());
- existingEntity.setStartTime(entity.getStartTime());
- entities.put(entityId, existingEntity);
- entityInsertTimes.put(entityId, System.currentTimeMillis());
- }
- if (entity.getEvents() != null) {
- if (existingEntity.getEvents() == null) {
- existingEntity.setEvents(entity.getEvents());
- } else {
- existingEntity.addEvents(entity.getEvents());
- }
- Collections.sort(existingEntity.getEvents());
- }
- // check startTime
- if (existingEntity.getStartTime() == null) {
- if (existingEntity.getEvents() == null
- || existingEntity.getEvents().isEmpty()) {
- TimelinePutError error = new TimelinePutError();
- error.setEntityId(entityId.getId());
- error.setEntityType(entityId.getType());
- error.setErrorCode(TimelinePutError.NO_START_TIME);
- response.addError(error);
- entities.remove(entityId);
- entityInsertTimes.remove(entityId);
- continue;
- } else {
- Long min = Long.MAX_VALUE;
- for (TimelineEvent e : entity.getEvents()) {
- if (min > e.getTimestamp()) {
- min = e.getTimestamp();
- }
- }
- existingEntity.setStartTime(min);
- }
- }
- if (entity.getPrimaryFilters() != null) {
- if (existingEntity.getPrimaryFilters() == null) {
- existingEntity.setPrimaryFilters(new HashMap<String, Set<Object>>());
- }
- for (Entry<String, Set<Object>> pf :
- entity.getPrimaryFilters().entrySet()) {
- for (Object pfo : pf.getValue()) {
- existingEntity.addPrimaryFilter(pf.getKey(), maybeConvert(pfo));
- }
- }
- }
- if (entity.getOtherInfo() != null) {
- if (existingEntity.getOtherInfo() == null) {
- existingEntity.setOtherInfo(new HashMap<String, Object>());
- }
- for (Entry<String, Object> info : entity.getOtherInfo().entrySet()) {
- existingEntity.addOtherInfo(info.getKey(),
- maybeConvert(info.getValue()));
- }
- }
- // relate it to other entities
- if (entity.getRelatedEntities() == null) {
- continue;
- }
- for (Map.Entry<String, Set<String>> partRelatedEntities : entity
- .getRelatedEntities().entrySet()) {
- if (partRelatedEntities == null) {
- continue;
- }
- for (String idStr : partRelatedEntities.getValue()) {
- EntityIdentifier relatedEntityId =
- new EntityIdentifier(idStr, partRelatedEntities.getKey());
- TimelineEntity relatedEntity = entities.get(relatedEntityId);
- if (relatedEntity != null) {
- relatedEntity.addRelatedEntity(
- existingEntity.getEntityType(), existingEntity.getEntityId());
- } else {
- relatedEntity = new TimelineEntity();
- relatedEntity.setEntityId(relatedEntityId.getId());
- relatedEntity.setEntityType(relatedEntityId.getType());
- relatedEntity.setStartTime(existingEntity.getStartTime());
- relatedEntity.addRelatedEntity(existingEntity.getEntityType(),
- existingEntity.getEntityId());
- entities.put(relatedEntityId, relatedEntity);
- entityInsertTimes.put(relatedEntityId, System.currentTimeMillis());
- }
- }
- }
- }
- return response;
- }
-
- private static TimelineEntity maskFields(
- TimelineEntity entity, EnumSet<Field> fields) {
- // Conceal the fields that are not going to be exposed
- TimelineEntity entityToReturn = new TimelineEntity();
- entityToReturn.setEntityId(entity.getEntityId());
- entityToReturn.setEntityType(entity.getEntityType());
- entityToReturn.setStartTime(entity.getStartTime());
- entityToReturn.setEvents(fields.contains(Field.EVENTS) ?
- entity.getEvents() : fields.contains(Field.LAST_EVENT_ONLY) ?
- Arrays.asList(entity.getEvents().get(0)) : null);
- entityToReturn.setRelatedEntities(fields.contains(Field.RELATED_ENTITIES) ?
- entity.getRelatedEntities() : null);
- entityToReturn.setPrimaryFilters(fields.contains(Field.PRIMARY_FILTERS) ?
- entity.getPrimaryFilters() : null);
- entityToReturn.setOtherInfo(fields.contains(Field.OTHER_INFO) ?
- entity.getOtherInfo() : null);
- return entityToReturn;
- }
-
- private static boolean matchFilter(Map<String, Object> tags,
- NameValuePair filter) {
- Object value = tags.get(filter.getName());
- if (value == null) { // doesn't have the filter
- return false;
- } else if (!value.equals(filter.getValue())) { // doesn't match the filter
- return false;
- }
- return true;
- }
-
- private static boolean matchPrimaryFilter(Map<String, Set<Object>> tags,
- NameValuePair filter) {
- Set<Object> value = tags.get(filter.getName());
- if (value == null) { // doesn't have the filter
- return false;
- } else {
- return value.contains(filter.getValue());
- }
- }
-
- private static Object maybeConvert(Object o) {
- if (o instanceof Long) {
- Long l = (Long)o;
- if (l >= Integer.MIN_VALUE && l <= Integer.MAX_VALUE) {
- return l.intValue();
- }
- }
- return o;
- }
-
-}
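
For reference, MemoryTimelineStore above was the test-only entity store behind the timeline REST layer before this patch. A test would have exercised it roughly as in this sketch, which uses only the put/getEntity surface visible in the deleted code; the entity id and type are made-up values.

    import java.util.EnumSet;

    import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
    import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;

    public class MemoryStoreSketch {
      public static void main(String[] args) throws Exception {
        MemoryTimelineStore store = new MemoryTimelineStore();

        // Write one entity with a start time so put() does not flag
        // TimelinePutError.NO_START_TIME.
        TimelineEntity entity = new TimelineEntity();
        entity.setEntityId("entity_1");      // made-up id
        entity.setEntityType("TEST_TYPE");   // made-up type
        entity.setStartTime(System.currentTimeMillis());

        TimelineEntities batch = new TimelineEntities();
        batch.addEntity(entity);
        store.put(batch);

        // Read it back with all fields unmasked.
        TimelineEntity fetched =
            store.getEntity("entity_1", "TEST_TYPE", EnumSet.allOf(Field.class));
        System.out.println(fetched.getEntityId() + " @ " + fetched.getStartTime());
      }
    }
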
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
deleted file mode 100644
index 4e00bc8..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import org.apache.hadoop.yarn.webapp.Controller;
-
-import com.google.inject.Inject;
-
-public class AHSController extends Controller {
-
- @Inject
- AHSController(RequestContext ctx) {
- super(ctx);
- }
-
- @Override
- public void index() {
- setTitle("Application History");
- }
-
- public void app() {
- render(AppPage.class);
- }
-
- public void appattempt() {
- render(AppAttemptPage.class);
- }
-
- public void container() {
- render(ContainerPage.class);
- }
-
- /**
- * Render the logs page.
- */
- public void logs() {
- render(AHSLogsPage.class);
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
deleted file mode 100644
index 8821bc0..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
-import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
-
-import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.log.AggregatedLogsBlock;
-
-public class AHSLogsPage extends AHSView {
- /*
- * (non-Javadoc)
- *
- * @see
- * org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSView#
- * preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
- */
- @Override
- protected void preHead(Page.HTML<_> html) {
- String logEntity = $(ENTITY_STRING);
- if (logEntity == null || logEntity.isEmpty()) {
- logEntity = $(CONTAINER_ID);
- }
- if (logEntity == null || logEntity.isEmpty()) {
- logEntity = "UNKNOWN";
- }
- commonPreHead(html);
- }
-
- /**
- * The content of this page is the AggregatedLogsBlock
- *
- * @return AggregatedLogsBlock.class
- */
- @Override
- protected Class<? extends SubView> content() {
- return AggregatedLogsBlock.class;
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSView.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSView.java
deleted file mode 100644
index 4baa75d..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSView.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.util.StringHelper.sjoin;
-import static org.apache.hadoop.yarn.webapp.YarnWebParams.APP_STATE;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
-
-import org.apache.hadoop.yarn.server.webapp.AppsBlock;
-import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.view.TwoColumnLayout;
-
-// Do NOT rename/refactor this to AHSView as it will wreak havoc
-// on Mac OS HFS
-public class AHSView extends TwoColumnLayout {
- static final int MAX_DISPLAY_ROWS = 100; // direct table rendering
- static final int MAX_FAST_ROWS = 1000; // inline js array
-
- @Override
- protected void preHead(Page.HTML<_> html) {
- commonPreHead(html);
- set(DATATABLES_ID, "apps");
- set(initID(DATATABLES, "apps"), appsTableInit());
- setTableStyles(html, "apps", ".queue {width:6em}", ".ui {width:8em}");
-
- // Set the correct title.
- String reqState = $(APP_STATE);
- reqState = (reqState == null || reqState.isEmpty() ? "All" : reqState);
- setTitle(sjoin(reqState, "Applications"));
- }
-
- protected void commonPreHead(Page.HTML<_> html) {
- set(ACCORDION_ID, "nav");
- set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
- }
-
- @Override
- protected Class<? extends SubView> nav() {
- return NavBlock.class;
- }
-
- @Override
- protected Class<? extends SubView> content() {
- return AppsBlock.class;
- }
-
- private String appsTableInit() {
- // id, user, name, queue, starttime, finishtime, state, status, progress, ui
- return tableInit().append(", 'aaData': appsTableData")
- .append(", bDeferRender: true").append(", bProcessing: true")
-
- .append("\n, aoColumnDefs: ").append(getAppsTableColumnDefs())
-
- // Sort by id upon page load
- .append(", aaSorting: [[0, 'desc']]}").toString();
- }
-
- protected String getAppsTableColumnDefs() {
- StringBuilder sb = new StringBuilder();
- return sb.append("[\n").append("{'sType':'numeric', 'aTargets': [0]")
- .append(", 'mRender': parseHadoopID }")
-
- .append("\n, {'sType':'numeric', 'aTargets': [5, 6]")
- .append(", 'mRender': renderHadoopDate }")
-
- .append("\n, {'sType':'numeric', bSearchable:false, 'aTargets': [9]")
- .append(", 'mRender': parseHadoopProgress }]").toString();
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java
deleted file mode 100644
index 72facce..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebApp.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.util.StringHelper.pajoin;
-
-import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryClientService;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
-import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
-import org.apache.hadoop.yarn.webapp.WebApp;
-import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
-
-public class AHSWebApp extends WebApp implements YarnWebParams {
-
- private final TimelineStore timelineStore;
- private final TimelineMetricStore timelineMetricStore;
- private final ApplicationHistoryClientService historyClientService;
-
- public AHSWebApp(TimelineStore timelineStore,
- TimelineMetricStore timelineMetricStore,
- ApplicationHistoryClientService historyClientService) {
-
- this.timelineStore = timelineStore;
- this.timelineMetricStore = timelineMetricStore;
- this.historyClientService = historyClientService;
- }
-
- @Override
- public void setup() {
- bind(YarnJacksonJaxbJsonProvider.class);
- bind(AHSWebServices.class);
- bind(TimelineWebServices.class);
- bind(GenericExceptionHandler.class);
- bind(ApplicationBaseProtocol.class).toInstance(historyClientService);
- bind(TimelineStore.class).toInstance(timelineStore);
- bind(TimelineMetricStore.class).toInstance(timelineMetricStore);
- route("/", AHSController.class);
- route(pajoin("/apps", APP_STATE), AHSController.class);
- route(pajoin("/app", APPLICATION_ID), AHSController.class, "app");
- route(pajoin("/appattempt", APPLICATION_ATTEMPT_ID), AHSController.class,
- "appattempt");
- route(pajoin("/container", CONTAINER_ID), AHSController.class, "container");
- route(
- pajoin("/logs", NM_NODENAME, CONTAINER_ID, ENTITY_STRING, APP_OWNER,
- CONTAINER_LOG_TYPE), AHSController.class, "logs");
- }
-}
\ No newline at end of file
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
deleted file mode 100644
index 3064d2d..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import java.util.Collections;
-import java.util.Set;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.server.webapp.WebServices;
-import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptsInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.AppsInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
-import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo;
-import org.apache.hadoop.yarn.webapp.BadRequestException;
-
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-
-@Singleton
-@Path("/ws/v1/applicationhistory")
-public class AHSWebServices extends WebServices {
-
- @Inject
- public AHSWebServices(ApplicationBaseProtocol appBaseProt) {
- super(appBaseProt);
- }
-
- @GET
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- public AppsInfo get(@Context HttpServletRequest req,
- @Context HttpServletResponse res) {
- return getApps(req, res, null, Collections.<String> emptySet(), null, null,
- null, null, null, null, null, null, Collections.<String> emptySet());
- }
-
- @GET
- @Path("/apps")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- @Override
- public AppsInfo getApps(@Context HttpServletRequest req,
- @Context HttpServletResponse res, @QueryParam("state") String stateQuery,
- @QueryParam("states") Set<String> statesQuery,
- @QueryParam("finalStatus") String finalStatusQuery,
- @QueryParam("user") String userQuery,
- @QueryParam("queue") String queueQuery,
- @QueryParam("limit") String count,
- @QueryParam("startedTimeBegin") String startedBegin,
- @QueryParam("startedTimeEnd") String startedEnd,
- @QueryParam("finishedTimeBegin") String finishBegin,
- @QueryParam("finishedTimeEnd") String finishEnd,
- @QueryParam("applicationTypes") Set<String> applicationTypes) {
- init(res);
- validateStates(stateQuery, statesQuery);
- return super.getApps(req, res, stateQuery, statesQuery, finalStatusQuery,
- userQuery, queueQuery, count, startedBegin, startedEnd, finishBegin,
- finishEnd, applicationTypes);
- }
-
- @GET
- @Path("/apps/{appid}")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- @Override
- public AppInfo getApp(@Context HttpServletRequest req,
- @Context HttpServletResponse res, @PathParam("appid") String appId) {
- init(res);
- return super.getApp(req, res, appId);
- }
-
- @GET
- @Path("/apps/{appid}/appattempts")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- @Override
- public AppAttemptsInfo getAppAttempts(@Context HttpServletRequest req,
- @Context HttpServletResponse res, @PathParam("appid") String appId) {
- init(res);
- return super.getAppAttempts(req, res, appId);
- }
-
- @GET
- @Path("/apps/{appid}/appattempts/{appattemptid}")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- @Override
- public AppAttemptInfo getAppAttempt(@Context HttpServletRequest req,
- @Context HttpServletResponse res, @PathParam("appid") String appId,
- @PathParam("appattemptid") String appAttemptId) {
- init(res);
- return super.getAppAttempt(req, res, appId, appAttemptId);
- }
-
- @GET
- @Path("/apps/{appid}/appattempts/{appattemptid}/containers")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- @Override
- public ContainersInfo getContainers(@Context HttpServletRequest req,
- @Context HttpServletResponse res, @PathParam("appid") String appId,
- @PathParam("appattemptid") String appAttemptId) {
- init(res);
- return super.getContainers(req, res, appId, appAttemptId);
- }
-
- @GET
- @Path("/apps/{appid}/appattempts/{appattemptid}/containers/{containerid}")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- @Override
- public ContainerInfo getContainer(@Context HttpServletRequest req,
- @Context HttpServletResponse res, @PathParam("appid") String appId,
- @PathParam("appattemptid") String appAttemptId,
- @PathParam("containerid") String containerId) {
- init(res);
- return super.getContainer(req, res, appId, appAttemptId, containerId);
- }
-
- private static void
- validateStates(String stateQuery, Set<String> statesQuery) {
- // stateQuery is deprecated.
- if (stateQuery != null && !stateQuery.isEmpty()) {
- statesQuery.add(stateQuery);
- }
- Set<String> appStates = parseQueries(statesQuery, true);
- for (String appState : appStates) {
- switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
- case FINISHED:
- case FAILED:
- case KILLED:
- continue;
- default:
- throw new BadRequestException("Invalid application-state " + appState
- + " specified. It should be a final state");
- }
- }
- }
-
-}
\ No newline at end of file
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSController.java
similarity index 74%
rename from ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java
rename to ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSController.java
index 970e868..0bf962e 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/package-info.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSController.java
@@ -15,6 +15,23 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-@InterfaceAudience.Private
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-import org.apache.hadoop.classification.InterfaceAudience;
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import org.apache.hadoop.yarn.webapp.Controller;
+
+import com.google.inject.Inject;
+
+public class AMSController extends Controller {
+
+ @Inject
+ AMSController(RequestContext ctx) {
+ super(ctx);
+ }
+
+ @Override
+ public void index() {
+ setTitle("Ambari Metrics Service");
+ }
+
+}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ContainerPage.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSWebApp.java
similarity index 55%
rename from ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ContainerPage.java
rename to ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSWebApp.java
index 1be8a26..2f6eec7 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/ContainerPage.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AMSWebApp.java
@@ -17,25 +17,26 @@
*/
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-import static org.apache.hadoop.yarn.util.StringHelper.join;
-
-import org.apache.hadoop.yarn.server.webapp.ContainerBlock;
-import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
-public class ContainerPage extends AHSView {
-
- @Override
- protected void preHead(Page.HTML<_> html) {
- commonPreHead(html);
+public class AMSWebApp extends WebApp implements YarnWebParams {
+
+ private final TimelineMetricStore timelineMetricStore;
- String containerId = $(YarnWebParams.CONTAINER_ID);
- set(TITLE, containerId.isEmpty() ? "Bad request: missing container ID"
- : join("Container ", $(YarnWebParams.CONTAINER_ID)));
+ public AMSWebApp(TimelineMetricStore timelineMetricStore) {
+ this.timelineMetricStore = timelineMetricStore;
}
@Override
- protected Class<? extends SubView> content() {
- return ContainerBlock.class;
+ public void setup() {
+ bind(YarnJacksonJaxbJsonProvider.class);
+ bind(TimelineWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(TimelineMetricStore.class).toInstance(timelineMetricStore);
+ route("/", AMSController.class);
}
}
\ No newline at end of file
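
The slimmed-down AMSWebApp now binds only the JSON provider, TimelineWebServices, the generic exception handler, and the supplied TimelineMetricStore, and routes "/" to AMSController. A rough, hypothetical sketch of how a collector process could bring it up with YARN's WebApps builder follows; the class name and bind address are placeholders, not this patch's actual startup code.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
    import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AMSWebApp;
    import org.apache.hadoop.yarn.webapp.WebApp;
    import org.apache.hadoop.yarn.webapp.WebApps;

    public class WebAppStartSketch {
      static WebApp start(Configuration conf, TimelineMetricStore metricStore) {
        // "ws" publishes the REST services under /ws/*, which lines up with
        // the @Path("/ws/v1/timeline") annotation on TimelineWebServices.
        return WebApps
            .$for("timeline", null, null, "ws")  // prefix, api, app, ws name
            .with(conf)
            .at("0.0.0.0:6188")                  // placeholder bind address
            .start(new AMSWebApp(metricStore));
      }
    }
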
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppAttemptPage.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppAttemptPage.java
deleted file mode 100644
index 63b44bd..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppAttemptPage.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.util.StringHelper.join;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
-
-import org.apache.hadoop.yarn.server.webapp.AppAttemptBlock;
-import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
-
-public class AppAttemptPage extends AHSView {
-
- @Override
- protected void preHead(Page.HTML<_> html) {
- commonPreHead(html);
-
- String appAttemptId = $(YarnWebParams.APPLICATION_ATTEMPT_ID);
- set(
- TITLE,
- appAttemptId.isEmpty() ? "Bad request: missing application attempt ID"
- : join("Application Attempt ",
- $(YarnWebParams.APPLICATION_ATTEMPT_ID)));
-
- set(DATATABLES_ID, "containers");
- set(initID(DATATABLES, "containers"), containersTableInit());
- setTableStyles(html, "containers", ".queue {width:6em}", ".ui {width:8em}");
- }
-
- @Override
- protected Class<? extends SubView> content() {
- return AppAttemptBlock.class;
- }
-
- private String containersTableInit() {
- return tableInit().append(", 'aaData': containersTableData")
- .append(", bDeferRender: true").append(", bProcessing: true")
-
- .append("\n, aoColumnDefs: ").append(getContainersTableColumnDefs())
-
- // Sort by id upon page load
- .append(", aaSorting: [[0, 'desc']]}").toString();
- }
-
- protected String getContainersTableColumnDefs() {
- StringBuilder sb = new StringBuilder();
- return sb.append("[\n").append("{'sType':'numeric', 'aTargets': [0]")
- .append(", 'mRender': parseHadoopID }]").toString();
- }
-
-}
\ No newline at end of file
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppPage.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppPage.java
deleted file mode 100644
index 96ca659..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppPage.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.util.StringHelper.join;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
-
-import org.apache.hadoop.yarn.server.webapp.AppBlock;
-import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
-
-public class AppPage extends AHSView {
-
- @Override
- protected void preHead(Page.HTML<_> html) {
- commonPreHead(html);
-
- String appId = $(YarnWebParams.APPLICATION_ID);
- set(
- TITLE,
- appId.isEmpty() ? "Bad request: missing application ID" : join(
- "Application ", $(YarnWebParams.APPLICATION_ID)));
-
- set(DATATABLES_ID, "attempts");
- set(initID(DATATABLES, "attempts"), attemptsTableInit());
- setTableStyles(html, "attempts", ".queue {width:6em}", ".ui {width:8em}");
- }
-
- @Override
- protected Class<? extends SubView> content() {
- return AppBlock.class;
- }
-
- private String attemptsTableInit() {
- return tableInit().append(", 'aaData': attemptsTableData")
- .append(", bDeferRender: true").append(", bProcessing: true")
-
- .append("\n, aoColumnDefs: ").append(getAttemptsTableColumnDefs())
-
- // Sort by id upon page load
- .append(", aaSorting: [[0, 'desc']]}").toString();
- }
-
- protected String getAttemptsTableColumnDefs() {
- StringBuilder sb = new StringBuilder();
- return sb.append("[\n").append("{'sType':'numeric', 'aTargets': [0]")
- .append(", 'mRender': parseHadoopID }")
-
- .append("\n, {'sType':'numeric', 'aTargets': [1]")
- .append(", 'mRender': renderHadoopDate }]").toString();
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/NavBlock.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/NavBlock.java
deleted file mode 100644
index e84ddec..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/NavBlock.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-
-public class NavBlock extends HtmlBlock {
-
- @Override
- public void render(Block html) {
- html.
- div("#nav").
- h3("Application History").
- ul().
- li().a(url("apps"), "Applications").
- ul().
- li().a(url("apps",
- YarnApplicationState.FINISHED.toString()),
- YarnApplicationState.FINISHED.toString()).
- _().
- li().a(url("apps",
- YarnApplicationState.FAILED.toString()),
- YarnApplicationState.FAILED.toString()).
- _().
- li().a(url("apps",
- YarnApplicationState.KILLED.toString()),
- YarnApplicationState.KILLED.toString()).
- _().
- _().
- _().
- _().
- _();
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
index dc401e6..2930b33 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TimelineWebServices.java
@@ -18,44 +18,26 @@
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience.Public;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.metrics2.sink.timeline.AggregationResult;
-import org.apache.hadoop.metrics2.sink.timeline.ContainerMetric;
-import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
-import org.apache.hadoop.metrics2.sink.timeline.TopNConfig;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import org.apache.hadoop.metrics2.sink.timeline.Precision;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.discovery.TimelineMetricMetadataKey;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.EntityIdentifier;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
-import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
-import org.apache.hadoop.yarn.webapp.BadRequestException;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
@@ -66,40 +48,40 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-import static org.apache.hadoop.yarn.util.StringHelper.CSV_JOINER;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.metrics2.sink.timeline.AggregationResult;
+import org.apache.hadoop.metrics2.sink.timeline.ContainerMetric;
+import org.apache.hadoop.metrics2.sink.timeline.Precision;
+import org.apache.hadoop.metrics2.sink.timeline.PrecisionLimitExceededException;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricMetadata;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.metrics2.sink.timeline.TopNConfig;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.NameValuePair;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
+import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
+import org.apache.hadoop.yarn.webapp.BadRequestException;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
@Singleton
@Path("/ws/v1/timeline")
-//TODO: support XML serialization/deserialization
public class TimelineWebServices {
-
private static final Log LOG = LogFactory.getLog(TimelineWebServices.class);
-
- private TimelineStore store;
+
private TimelineMetricStore timelineMetricStore;
@Inject
- public TimelineWebServices(TimelineStore store,
- TimelineMetricStore timelineMetricStore) {
- this.store = store;
+ public TimelineWebServices(TimelineMetricStore timelineMetricStore) {
this.timelineMetricStore = timelineMetricStore;
}
@@ -139,125 +121,7 @@ public class TimelineWebServices {
@Context HttpServletRequest req,
@Context HttpServletResponse res) {
init(res);
- return new AboutInfo("Timeline API");
- }
-
- /**
- * Return a list of entities that match the given parameters.
- */
- @GET
- @Path("/{entityType}")
- @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public TimelineEntities getEntities(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res,
- @PathParam("entityType") String entityType,
- @QueryParam("primaryFilter") String primaryFilter,
- @QueryParam("secondaryFilter") String secondaryFilter,
- @QueryParam("windowStart") String windowStart,
- @QueryParam("windowEnd") String windowEnd,
- @QueryParam("fromId") String fromId,
- @QueryParam("fromTs") String fromTs,
- @QueryParam("limit") String limit,
- @QueryParam("fields") String fields) {
- init(res);
- TimelineEntities entities = null;
- try {
- entities = store.getEntities(
- parseStr(entityType),
- parseLongStr(limit),
- parseLongStr(windowStart),
- parseLongStr(windowEnd),
- parseStr(fromId),
- parseLongStr(fromTs),
- parsePairStr(primaryFilter, ":"),
- parsePairsStr(secondaryFilter, ",", ":"),
- parseFieldsStr(fields, ","));
- } catch (NumberFormatException e) {
- throw new BadRequestException(
- "windowStart, windowEnd or limit is not a numeric value.");
- } catch (IllegalArgumentException e) {
- throw new BadRequestException("requested invalid field.");
- } catch (IOException e) {
- LOG.error("Error getting entities", e);
- throw new WebApplicationException(e,
- Response.Status.INTERNAL_SERVER_ERROR);
- }
- if (entities == null) {
- return new TimelineEntities();
- }
- return entities;
- }
-
- /**
- * Return a single entity of the given entity type and Id.
- */
- @GET
- @Path("/{entityType}/{entityId}")
- @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public TimelineEntity getEntity(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res,
- @PathParam("entityType") String entityType,
- @PathParam("entityId") String entityId,
- @QueryParam("fields") String fields) {
- init(res);
- TimelineEntity entity = null;
- try {
- entity =
- store.getEntity(parseStr(entityId), parseStr(entityType),
- parseFieldsStr(fields, ","));
- } catch (IllegalArgumentException e) {
- throw new BadRequestException(
- "requested invalid field.");
- } catch (IOException e) {
- LOG.error("Error getting entity", e);
- throw new WebApplicationException(e,
- Response.Status.INTERNAL_SERVER_ERROR);
- }
- if (entity == null) {
- throw new WebApplicationException(Response.Status.NOT_FOUND);
- }
- return entity;
- }
-
- /**
- * Return the events that match the given parameters.
- */
- @GET
- @Path("/{entityType}/events")
- @Produces({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public TimelineEvents getEvents(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res,
- @PathParam("entityType") String entityType,
- @QueryParam("entityId") String entityId,
- @QueryParam("eventType") String eventType,
- @QueryParam("windowStart") String windowStart,
- @QueryParam("windowEnd") String windowEnd,
- @QueryParam("limit") String limit) {
- init(res);
- TimelineEvents events = null;
- try {
- events = store.getEntityTimelines(
- parseStr(entityType),
- parseArrayStr(entityId, ","),
- parseLongStr(limit),
- parseLongStr(windowStart),
- parseLongStr(windowEnd),
- parseArrayStr(eventType, ","));
- } catch (NumberFormatException e) {
- throw new BadRequestException(
- "windowStart, windowEnd or limit is not a numeric value.");
- } catch (IOException e) {
- LOG.error("Error getting entity timelines", e);
- throw new WebApplicationException(e,
- Response.Status.INTERNAL_SERVER_ERROR);
- }
- if (events == null) {
- return new TimelineEvents();
- }
- return events;
+ return new AboutInfo("AMS API");
}
/**
@@ -559,42 +423,6 @@ public class TimelineWebServices {
return timelineMetricStore.getLiveInstances();
}
- /**
- * Store the given entities into the timeline store, and return the errors
- * that happen during storing.
- */
- @POST
- @Consumes({ MediaType.APPLICATION_JSON /* , MediaType.APPLICATION_XML */})
- public TimelinePutResponse postEntities(
- @Context HttpServletRequest req,
- @Context HttpServletResponse res,
- TimelineEntities entities) {
- init(res);
- if (entities == null) {
- return new TimelinePutResponse();
- }
- try {
- List<EntityIdentifier> entityIDs = new ArrayList<EntityIdentifier>();
- for (TimelineEntity entity : entities.getEntities()) {
- EntityIdentifier entityID =
- new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
- entityIDs.add(entityID);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Storing the entity " + entityID + ", JSON-style content: "
- + TimelineUtils.dumpTimelineRecordtoJSON(entity));
- }
- }
- if (LOG.isDebugEnabled()) {
- LOG.debug("Storing entities: " + CSV_JOINER.join(entityIDs));
- }
- return store.put(entities);
- } catch (IOException e) {
- LOG.error("Error putting entities", e);
- throw new WebApplicationException(e,
- Response.Status.INTERNAL_SERVER_ERROR);
- }
- }
-
private void init(HttpServletResponse response) {
response.setContentType(null);
}
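
With the entity read/write endpoints gone, TimelineWebServices now serves only the metrics resources under /ws/v1/timeline, and the about string reports "AMS API" accordingly. Assuming the collector's usual GET /ws/v1/timeline/metrics resource, a plain-JDK read could look like the sketch below; the host, port, metric name, and appId are placeholders.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class MetricsQuerySketch {
      public static void main(String[] args) throws Exception {
        long end = System.currentTimeMillis();
        long start = end - 3600_000L;  // last hour
        URL url = new URL("http://collector.example.com:6188"       // placeholder host:port
            + "/ws/v1/timeline/metrics"
            + "?metricNames=regionserver.Server.totalRequestCount"  // placeholder metric
            + "&appId=ams-hbase"                                    // placeholder appId
            + "&startTime=" + start + "&endTime=" + end);

        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line);  // raw TimelineMetrics JSON
          }
        } finally {
          conn.disconnect();
        }
      }
    }
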
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
deleted file mode 100644
index ec9b49d..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerState;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
-
-public class ApplicationHistoryStoreTestUtils {
-
- protected ApplicationHistoryStore store;
-
- protected void writeApplicationStartData(ApplicationId appId)
- throws IOException {
- store.applicationStarted(ApplicationStartData.newInstance(appId,
- appId.toString(), "test type", "test queue", "test user", 0, 0));
- }
-
- protected void writeApplicationFinishData(ApplicationId appId)
- throws IOException {
- store.applicationFinished(ApplicationFinishData.newInstance(appId, 0,
- appId.toString(), FinalApplicationStatus.UNDEFINED,
- YarnApplicationState.FINISHED));
- }
-
- protected void writeApplicationAttemptStartData(
- ApplicationAttemptId appAttemptId) throws IOException {
- store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
- appAttemptId, appAttemptId.toString(), 0,
- ContainerId.newContainerId(appAttemptId, 1)));
- }
-
- protected void writeApplicationAttemptFinishData(
- ApplicationAttemptId appAttemptId) throws IOException {
- store.applicationAttemptFinished(ApplicationAttemptFinishData.newInstance(
- appAttemptId, appAttemptId.toString(), "test tracking url",
- FinalApplicationStatus.UNDEFINED, YarnApplicationAttemptState.FINISHED));
- }
-
- protected void writeContainerStartData(ContainerId containerId)
- throws IOException {
- store.containerStarted(ContainerStartData.newInstance(containerId,
- Resource.newInstance(0, 0), NodeId.newInstance("localhost", 0),
- Priority.newInstance(containerId.getId()), 0));
- }
-
- protected void writeContainerFinishData(ContainerId containerId)
- throws IOException {
- store.containerFinished(ContainerFinishData.newInstance(containerId, 0,
- containerId.toString(), 0, ContainerState.COMPLETE));
- }
-
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
deleted file mode 100644
index f93ac5e..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.util.List;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
-import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerReport;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-// Timeline service client support is not enabled for AMBARI_METRICS
-@Ignore
-public class TestApplicationHistoryClientService extends
- ApplicationHistoryStoreTestUtils {
-
- ApplicationHistoryServer historyServer = null;
- String expectedLogUrl = null;
-
- @Before
- public void setup() {
- historyServer = new ApplicationHistoryServer();
- Configuration config = new YarnConfiguration();
- expectedLogUrl = WebAppUtils.getHttpSchemePrefix(config) +
- WebAppUtils.getAHSWebAppURLWithoutScheme(config) +
- "/applicationhistory/logs/localhost:0/container_0_0001_01_000001/" +
- "container_0_0001_01_000001/test user";
- config.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE,
- MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
- historyServer.init(config);
- historyServer.start();
- store =
- ((ApplicationHistoryManagerImpl) historyServer.getApplicationHistory())
- .getHistoryStore();
- }
-
- @After
- public void tearDown() throws Exception {
- historyServer.stop();
- }
-
- @Test
- public void testApplicationReport() throws IOException, YarnException {
- ApplicationId appId = null;
- appId = ApplicationId.newInstance(0, 1);
- writeApplicationStartData(appId);
- writeApplicationFinishData(appId);
- GetApplicationReportRequest request =
- GetApplicationReportRequest.newInstance(appId);
- GetApplicationReportResponse response =
- historyServer.getClientService().getClientHandler()
- .getApplicationReport(request);
- ApplicationReport appReport = response.getApplicationReport();
- Assert.assertNotNull(appReport);
- Assert.assertEquals("application_0_0001", appReport.getApplicationId()
- .toString());
- Assert.assertEquals("test type", appReport.getApplicationType().toString());
- Assert.assertEquals("test queue", appReport.getQueue().toString());
- }
-
- @Test
- public void testApplications() throws IOException, YarnException {
- ApplicationId appId = null;
- appId = ApplicationId.newInstance(0, 1);
- writeApplicationStartData(appId);
- writeApplicationFinishData(appId);
- ApplicationId appId1 = ApplicationId.newInstance(0, 2);
- writeApplicationStartData(appId1);
- writeApplicationFinishData(appId1);
- GetApplicationsRequest request = GetApplicationsRequest.newInstance();
- GetApplicationsResponse response =
- historyServer.getClientService().getClientHandler()
- .getApplications(request);
- List<ApplicationReport> appReport = response.getApplicationList();
- Assert.assertNotNull(appReport);
- Assert.assertEquals(appId, appReport.get(0).getApplicationId());
- Assert.assertEquals(appId1, appReport.get(1).getApplicationId());
- }
-
- @Test
- public void testApplicationAttemptReport() throws IOException, YarnException {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- writeApplicationAttemptStartData(appAttemptId);
- writeApplicationAttemptFinishData(appAttemptId);
- GetApplicationAttemptReportRequest request =
- GetApplicationAttemptReportRequest.newInstance(appAttemptId);
- GetApplicationAttemptReportResponse response =
- historyServer.getClientService().getClientHandler()
- .getApplicationAttemptReport(request);
- ApplicationAttemptReport attemptReport =
- response.getApplicationAttemptReport();
- Assert.assertNotNull(attemptReport);
- Assert.assertEquals("appattempt_0_0001_000001", attemptReport
- .getApplicationAttemptId().toString());
- }
-
- @Test
- public void testApplicationAttempts() throws IOException, YarnException {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- ApplicationAttemptId appAttemptId1 =
- ApplicationAttemptId.newInstance(appId, 2);
- writeApplicationAttemptStartData(appAttemptId);
- writeApplicationAttemptFinishData(appAttemptId);
- writeApplicationAttemptStartData(appAttemptId1);
- writeApplicationAttemptFinishData(appAttemptId1);
- GetApplicationAttemptsRequest request =
- GetApplicationAttemptsRequest.newInstance(appId);
- GetApplicationAttemptsResponse response =
- historyServer.getClientService().getClientHandler()
- .getApplicationAttempts(request);
- List<ApplicationAttemptReport> attemptReports =
- response.getApplicationAttemptList();
- Assert.assertNotNull(attemptReports);
- Assert.assertEquals(appAttemptId, attemptReports.get(0)
- .getApplicationAttemptId());
- Assert.assertEquals(appAttemptId1, attemptReports.get(1)
- .getApplicationAttemptId());
- }
-
- @Test
- public void testContainerReport() throws IOException, YarnException {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- writeApplicationStartData(appId);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
- writeContainerStartData(containerId);
- writeContainerFinishData(containerId);
- writeApplicationFinishData(appId);
- GetContainerReportRequest request =
- GetContainerReportRequest.newInstance(containerId);
- GetContainerReportResponse response =
- historyServer.getClientService().getClientHandler()
- .getContainerReport(request);
- ContainerReport container = response.getContainerReport();
- Assert.assertNotNull(container);
- Assert.assertEquals(containerId, container.getContainerId());
- Assert.assertEquals(expectedLogUrl, container.getLogUrl());
- }
-
- @Test
- public void testContainers() throws IOException, YarnException {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- writeApplicationStartData(appId);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
- ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 2);
- writeContainerStartData(containerId);
- writeContainerFinishData(containerId);
- writeContainerStartData(containerId1);
- writeContainerFinishData(containerId1);
- writeApplicationFinishData(appId);
- GetContainersRequest request =
- GetContainersRequest.newInstance(appAttemptId);
- GetContainersResponse response =
- historyServer.getClientService().getClientHandler()
- .getContainers(request);
- List<ContainerReport> containers = response.getContainerList();
- Assert.assertNotNull(containers);
- Assert.assertEquals(containerId, containers.get(1).getContainerId());
- Assert.assertEquals(containerId1, containers.get(0).getContainerId());
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java
deleted file mode 100644
index aad23d9..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-public class TestApplicationHistoryManagerImpl extends
- ApplicationHistoryStoreTestUtils {
- ApplicationHistoryManagerImpl applicationHistoryManagerImpl = null;
-
- @Before
- public void setup() throws Exception {
- Configuration config = new Configuration();
- config.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE,
- MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
- applicationHistoryManagerImpl = new ApplicationHistoryManagerImpl();
- applicationHistoryManagerImpl.init(config);
- applicationHistoryManagerImpl.start();
- store = applicationHistoryManagerImpl.getHistoryStore();
- }
-
- @After
- public void tearDown() throws Exception {
- applicationHistoryManagerImpl.stop();
- }
-
- @Test
- @Ignore
- public void testApplicationReport() throws IOException, YarnException {
- ApplicationId appId = null;
- appId = ApplicationId.newInstance(0, 1);
- writeApplicationStartData(appId);
- writeApplicationFinishData(appId);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- writeApplicationAttemptStartData(appAttemptId);
- writeApplicationAttemptFinishData(appAttemptId);
- ApplicationReport appReport =
- applicationHistoryManagerImpl.getApplication(appId);
- Assert.assertNotNull(appReport);
- Assert.assertEquals(appId, appReport.getApplicationId());
- Assert.assertEquals(appAttemptId,
- appReport.getCurrentApplicationAttemptId());
- Assert.assertEquals(appAttemptId.toString(), appReport.getHost());
- Assert.assertEquals("test type", appReport.getApplicationType().toString());
- Assert.assertEquals("test queue", appReport.getQueue().toString());
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
deleted file mode 100644
index 03205e7..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
+++ /dev/null
@@ -1,267 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.service.Service.STATE;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.HBaseTimelineMetricsService;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixHBaseAccessor;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.availability.MetricCollectorHAController;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.DefaultPhoenixDataSource;
-import org.apache.zookeeper.ClientCnxn;
-import org.easymock.EasyMock;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.junit.runner.RunWith;
-import org.powermock.api.easymock.PowerMock;
-import org.powermock.core.classloader.annotations.PowerMockIgnore;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.Statement;
-
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.METRICS_SITE_CONFIGURATION_FILE;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.anyString;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.fail;
-import static org.powermock.api.easymock.PowerMock.expectNew;
-import static org.powermock.api.easymock.PowerMock.mockStatic;
-import static org.powermock.api.easymock.PowerMock.replayAll;
-import static org.powermock.api.easymock.PowerMock.verifyAll;
-import static org.powermock.api.support.membermodification.MemberMatcher.method;
-import static org.powermock.api.support.membermodification.MemberModifier.suppress;
-
-@RunWith(PowerMockRunner.class)
-@PrepareForTest({ PhoenixHBaseAccessor.class, HBaseTimelineMetricsService.class, UserGroupInformation.class,
- ClientCnxn.class, DefaultPhoenixDataSource.class, ConnectionFactory.class,
- TimelineMetricConfiguration.class, ApplicationHistoryServer.class })
-@PowerMockIgnore( {"javax.management.*"})
-public class TestApplicationHistoryServer {
-
- ApplicationHistoryServer historyServer = null;
- Configuration metricsConf = null;
-
- @Rule
- public TemporaryFolder folder = new TemporaryFolder();
-
- @Before
- @SuppressWarnings("all")
- public void setup() throws URISyntaxException, IOException {
- folder.create();
- File hbaseSite = folder.newFile("hbase-site.xml");
- File amsSite = folder.newFile("ams-site.xml");
-
- FileUtils.writeStringToFile(hbaseSite, "<configuration>\n" +
- " <property>\n" +
- " <name>hbase.defaults.for.version.skip</name>\n" +
- " <value>true</value>\n" +
- " </property>" +
- " <property> " +
- " <name>hbase.zookeeper.quorum</name>\n" +
- " <value>localhost</value>\n" +
- " </property>" +
- "</configuration>");
-
- FileUtils.writeStringToFile(amsSite, "<configuration>\n" +
- " <property>\n" +
- " <name>test</name>\n" +
- " <value>testReady</value>\n" +
- " </property>\n" +
- " <property>\n" +
- " <name>timeline.metrics.host.aggregator.hourly.disabled</name>\n" +
- " <value>true</value>\n" +
- " <description>\n" +
- " Disable host based hourly aggregations.\n" +
- " </description>\n" +
- " </property>\n" +
- " <property>\n" +
- " <name>timeline.metrics.host.aggregator.minute.disabled</name>\n" +
- " <value>true</value>\n" +
- " <description>\n" +
- " Disable host based minute aggregations.\n" +
- " </description>\n" +
- " </property>\n" +
- " <property>\n" +
- " <name>timeline.metrics.cluster.aggregator.hourly.disabled</name>\n" +
- " <value>true</value>\n" +
- " <description>\n" +
- " Disable cluster based hourly aggregations.\n" +
- " </description>\n" +
- " </property>\n" +
- " <property>\n" +
- " <name>timeline.metrics.cluster.aggregator.minute.disabled</name>\n" +
- " <value>true</value>\n" +
- " <description>\n" +
- " Disable cluster based minute aggregations.\n" +
- " </description>\n" +
- " </property>" +
- "</configuration>");
-
- ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
-
- // Add the conf dir to the classpath
- // Chain the current thread classloader
- URLClassLoader urlClassLoader = null;
- try {
- urlClassLoader = new URLClassLoader(new URL[] {
- folder.getRoot().toURI().toURL() }, currentClassLoader);
- } catch (MalformedURLException e) {
- e.printStackTrace();
- }
-
- Thread.currentThread().setContextClassLoader(urlClassLoader);
- metricsConf = new Configuration(false);
- metricsConf.addResource(Thread.currentThread().getContextClassLoader()
- .getResource(METRICS_SITE_CONFIGURATION_FILE).toURI().toURL());
- assertNotNull(metricsConf.get("test"));
- }
-
- // simple test init/start/stop ApplicationHistoryServer. Status should change.
- @Ignore
- @Test(timeout = 50000)
- public void testStartStopServer() throws Exception {
- Configuration config = new YarnConfiguration();
- UserGroupInformation ugi =
- UserGroupInformation.createUserForTesting("ambari", new String[] {"ambari"});
-
- mockStatic(UserGroupInformation.class);
- expect(UserGroupInformation.getCurrentUser()).andReturn(ugi).anyTimes();
- expect(UserGroupInformation.isSecurityEnabled()).andReturn(false).anyTimes();
- config.set(YarnConfiguration.APPLICATION_HISTORY_STORE,
- "org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore");
- Configuration hbaseConf = new Configuration();
- hbaseConf.set("hbase.zookeeper.quorum", "localhost");
-
- TimelineMetricConfiguration metricConfiguration = PowerMock.createNiceMock(TimelineMetricConfiguration.class);
- expectNew(TimelineMetricConfiguration.class).andReturn(metricConfiguration);
- expect(metricConfiguration.getHbaseConf()).andReturn(hbaseConf);
- Configuration metricsConf = new Configuration();
- expect(metricConfiguration.getMetricsConf()).andReturn(metricsConf).anyTimes();
- expect(metricConfiguration.isTimelineMetricsServiceWatcherDisabled()).andReturn(true);
- expect(metricConfiguration.getTimelineMetricsServiceHandlerThreadCount()).andReturn(20).anyTimes();
- expect(metricConfiguration.getWebappAddress()).andReturn("localhost:9990").anyTimes();
- expect(metricConfiguration.getTimelineServiceRpcAddress()).andReturn("localhost:10299").anyTimes();
- expect(metricConfiguration.getClusterZKQuorum()).andReturn("localhost").anyTimes();
- expect(metricConfiguration.getClusterZKClientPort()).andReturn("2181").anyTimes();
-
- Connection connection = createNiceMock(Connection.class);
- Statement stmt = createNiceMock(Statement.class);
- PreparedStatement preparedStatement = createNiceMock(PreparedStatement.class);
- ResultSet rs = createNiceMock(ResultSet.class);
- mockStatic(DriverManager.class);
- expect(DriverManager.getConnection("jdbc:phoenix:localhost:2181:/ams-hbase-unsecure"))
- .andReturn(connection).anyTimes();
- expect(connection.createStatement()).andReturn(stmt).anyTimes();
- expect(connection.prepareStatement(anyString())).andReturn(preparedStatement).anyTimes();
- suppress(method(Statement.class, "executeUpdate", String.class));
- expect(preparedStatement.executeQuery()).andReturn(rs).anyTimes();
- expect(rs.next()).andReturn(false).anyTimes();
- preparedStatement.close();
- expectLastCall().anyTimes();
- connection.close();
- expectLastCall();
-
- MetricCollectorHAController haControllerMock = PowerMock.createMock(MetricCollectorHAController.class);
- expectNew(MetricCollectorHAController.class, metricConfiguration)
- .andReturn(haControllerMock);
-
- haControllerMock.initializeHAController();
- expectLastCall().once();
- expect(haControllerMock.isInitialized()).andReturn(false).anyTimes();
-
- org.apache.hadoop.hbase.client.Connection conn = createNiceMock(org.apache.hadoop.hbase.client.Connection.class);
- mockStatic(ConnectionFactory.class);
- expect(ConnectionFactory.createConnection((Configuration) anyObject())).andReturn(conn);
- expect(conn.getAdmin()).andReturn(null);
-
- EasyMock.replay(connection, stmt, preparedStatement, rs);
- replayAll();
-
- historyServer = new ApplicationHistoryServer();
- historyServer.init(config);
-
- verifyAll();
-
- assertEquals(STATE.INITED, historyServer.getServiceState());
- assertEquals(4, historyServer.getServices().size());
- ApplicationHistoryClientService historyService =
- historyServer.getClientService();
- assertNotNull(historyServer.getClientService());
- assertEquals(STATE.INITED, historyService.getServiceState());
-
- historyServer.start();
- assertEquals(STATE.STARTED, historyServer.getServiceState());
- assertEquals(STATE.STARTED, historyService.getServiceState());
- historyServer.stop();
- assertEquals(STATE.STOPPED, historyServer.getServiceState());
- }
-
- // test launch method
- @Ignore
- @Test(timeout = 60000)
- public void testLaunch() throws Exception {
-
- UserGroupInformation ugi =
- UserGroupInformation.createUserForTesting("ambari", new String[]{"ambari"});
- mockStatic(UserGroupInformation.class);
- expect(UserGroupInformation.getCurrentUser()).andReturn(ugi).anyTimes();
- expect(UserGroupInformation.isSecurityEnabled()).andReturn(false).anyTimes();
-
- ExitUtil.disableSystemExit();
- try {
- historyServer = ApplicationHistoryServer.launchAppHistoryServer(new String[0]);
- } catch (ExitUtil.ExitException e) {
- assertEquals(0, e.status);
- ExitUtil.resetFirstExitException();
- fail();
- }
- }
-
- @After
- public void stop() {
- if (historyServer != null) {
- historyServer.stop();
- }
- }
-}
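
The deleted server test above stubbed the Phoenix JDBC path with a URL of the form jdbc:phoenix:<zk quorum>:<zk port>:<znode>. For orientation, here is a minimal standalone sketch of opening such a connection directly through the stock JDBC API; the quorum, port and znode values mirror the illustrative ones from the mock and are not a required configuration.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public final class PhoenixConnectionSketch {
  public static void main(String[] args) throws SQLException {
    // URL shape: jdbc:phoenix:<zk quorum>:<zk port>:<hbase znode>.
    // These values mirror the mock above and are illustrative only.
    String url = "jdbc:phoenix:localhost:2181:/ams-hbase-unsecure";
    try (Connection conn = DriverManager.getConnection(url)) {
      // A successful getConnection() means the Phoenix driver resolved
      // the quorum and znode; nothing is written by this sketch.
      System.out.println("autoCommit=" + conn.getAutoCommit());
    }
  }
}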
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
deleted file mode 100644
index 543c25b..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.net.URI;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestFileSystemApplicationHistoryStore extends
- ApplicationHistoryStoreTestUtils {
-
- private FileSystem fs;
- private Path fsWorkingPath;
-
- @Before
- public void setup() throws Exception {
- fs = new RawLocalFileSystem();
- Configuration conf = new Configuration();
- fs.initialize(new URI("/"), conf);
- fsWorkingPath = new Path("Test");
- fs.delete(fsWorkingPath, true);
- conf.set(YarnConfiguration.FS_APPLICATION_HISTORY_STORE_URI, fsWorkingPath.toString());
- store = new FileSystemApplicationHistoryStore();
- store.init(conf);
- store.start();
- }
-
- @After
- public void tearDown() throws Exception {
- store.stop();
- fs.delete(fsWorkingPath, true);
- fs.close();
- }
-
- @Test
- public void testReadWriteHistoryData() throws IOException {
- testWriteHistoryData(5);
- testReadHistoryData(5);
- }
-
- private void testWriteHistoryData(int num) throws IOException {
- testWriteHistoryData(num, false, false);
- }
-
- private void testWriteHistoryData(
- int num, boolean missingContainer, boolean missingApplicationAttempt)
- throws IOException {
- // write application history data
- for (int i = 1; i <= num; ++i) {
- ApplicationId appId = ApplicationId.newInstance(0, i);
- writeApplicationStartData(appId);
-
- // write application attempt history data
- for (int j = 1; j <= num; ++j) {
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, j);
- writeApplicationAttemptStartData(appAttemptId);
-
- if (missingApplicationAttempt && j == num) {
- continue;
- }
- // write container history data
- for (int k = 1; k <= num; ++k) {
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
- writeContainerStartData(containerId);
- if (missingContainer && k == num) {
- continue;
- }
- writeContainerFinishData(containerId);
- }
- writeApplicationAttemptFinishData(appAttemptId);
- }
- writeApplicationFinishData(appId);
- }
- }
-
- private void testReadHistoryData(int num) throws IOException {
- testReadHistoryData(num, false, false);
- }
-
- private void testReadHistoryData(
- int num, boolean missingContainer, boolean missingApplicationAttempt)
- throws IOException {
- // read application history data
- Assert.assertEquals(num, store.getAllApplications().size());
- for (int i = 1; i <= num; ++i) {
- ApplicationId appId = ApplicationId.newInstance(0, i);
- ApplicationHistoryData appData = store.getApplication(appId);
- Assert.assertNotNull(appData);
- Assert.assertEquals(appId.toString(), appData.getApplicationName());
- Assert.assertEquals(appId.toString(), appData.getDiagnosticsInfo());
-
- // read application attempt history data
- Assert.assertEquals(num, store.getApplicationAttempts(appId).size());
- for (int j = 1; j <= num; ++j) {
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, j);
- ApplicationAttemptHistoryData attemptData =
- store.getApplicationAttempt(appAttemptId);
- Assert.assertNotNull(attemptData);
- Assert.assertEquals(appAttemptId.toString(), attemptData.getHost());
-
- if (missingApplicationAttempt && j == num) {
- Assert.assertNull(attemptData.getDiagnosticsInfo());
- continue;
- } else {
- Assert.assertEquals(appAttemptId.toString(),
- attemptData.getDiagnosticsInfo());
- }
-
- // read container history data
- Assert.assertEquals(num, store.getContainers(appAttemptId).size());
- for (int k = 1; k <= num; ++k) {
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
- ContainerHistoryData containerData = store.getContainer(containerId);
- Assert.assertNotNull(containerData);
- Assert.assertEquals(Priority.newInstance(containerId.getId()),
- containerData.getPriority());
- if (missingContainer && k == num) {
- Assert.assertNull(containerData.getDiagnosticsInfo());
- } else {
- Assert.assertEquals(containerId.toString(),
- containerData.getDiagnosticsInfo());
- }
- }
- ContainerHistoryData masterContainer =
- store.getAMContainer(appAttemptId);
- Assert.assertNotNull(masterContainer);
- Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
- masterContainer.getContainerId());
- }
- }
- }
-
- @Test
- public void testWriteAfterApplicationFinish() throws IOException {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- writeApplicationStartData(appId);
- writeApplicationFinishData(appId);
- // write application attempt history data
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- try {
- writeApplicationAttemptStartData(appAttemptId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is not opened"));
- }
- try {
- writeApplicationAttemptFinishData(appAttemptId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is not opened"));
- }
- // write container history data
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
- try {
- writeContainerStartData(containerId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is not opened"));
- }
- try {
- writeContainerFinishData(containerId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is not opened"));
- }
- }
-
- @Test
- public void testMassiveWriteContainerHistoryData() throws IOException {
- long mb = 1024 * 1024;
- long usedDiskBefore = fs.getContentSummary(fsWorkingPath).getLength() / mb;
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- writeApplicationStartData(appId);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- for (int i = 1; i <= 1000; ++i) {
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
- writeContainerStartData(containerId);
- writeContainerFinishData(containerId);
- }
- writeApplicationFinishData(appId);
- long usedDiskAfter = fs.getContentSummary(fsWorkingPath).getLength() / mb;
- Assert.assertTrue((usedDiskAfter - usedDiskBefore) < 20);
- }
-
- @Test
- public void testMissingContainerHistoryData() throws IOException {
- testWriteHistoryData(3, true, false);
- testReadHistoryData(3, true, false);
- }
-
- @Test
- public void testMissingApplicationAttemptHistoryData() throws IOException {
- testWriteHistoryData(3, false, true);
- testReadHistoryData(3, false, true);
- }
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
deleted file mode 100644
index b4da01a..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
-public class TestMemoryApplicationHistoryStore extends
- ApplicationHistoryStoreTestUtils {
-
- @Before
- public void setup() {
- store = new MemoryApplicationHistoryStore();
- }
-
- @Test
- public void testReadWriteApplicationHistory() throws Exception {
- // Out of order
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- try {
- writeApplicationFinishData(appId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains(
- "is stored before the start information"));
- }
- // Normal
- int numApps = 5;
- for (int i = 1; i <= numApps; ++i) {
- appId = ApplicationId.newInstance(0, i);
- writeApplicationStartData(appId);
- writeApplicationFinishData(appId);
- }
- Assert.assertEquals(numApps, store.getAllApplications().size());
- for (int i = 1; i <= numApps; ++i) {
- appId = ApplicationId.newInstance(0, i);
- ApplicationHistoryData data = store.getApplication(appId);
- Assert.assertNotNull(data);
- Assert.assertEquals(appId.toString(), data.getApplicationName());
- Assert.assertEquals(appId.toString(), data.getDiagnosticsInfo());
- }
- // Write again
- appId = ApplicationId.newInstance(0, 1);
- try {
- writeApplicationStartData(appId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is already stored"));
- }
- try {
- writeApplicationFinishData(appId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is already stored"));
- }
- }
-
- @Test
- public void testReadWriteApplicationAttemptHistory() throws Exception {
- // Out of order
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- try {
- writeApplicationAttemptFinishData(appAttemptId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains(
- "is stored before the start information"));
- }
- // Normal
- int numAppAttempts = 5;
- writeApplicationStartData(appId);
- for (int i = 1; i <= numAppAttempts; ++i) {
- appAttemptId = ApplicationAttemptId.newInstance(appId, i);
- writeApplicationAttemptStartData(appAttemptId);
- writeApplicationAttemptFinishData(appAttemptId);
- }
- Assert.assertEquals(numAppAttempts, store.getApplicationAttempts(appId)
- .size());
- for (int i = 1; i <= numAppAttempts; ++i) {
- appAttemptId = ApplicationAttemptId.newInstance(appId, i);
- ApplicationAttemptHistoryData data =
- store.getApplicationAttempt(appAttemptId);
- Assert.assertNotNull(data);
- Assert.assertEquals(appAttemptId.toString(), data.getHost());
- Assert.assertEquals(appAttemptId.toString(), data.getDiagnosticsInfo());
- }
- writeApplicationFinishData(appId);
- // Write again
- appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
- try {
- writeApplicationAttemptStartData(appAttemptId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is already stored"));
- }
- try {
- writeApplicationAttemptFinishData(appAttemptId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is already stored"));
- }
- }
-
- @Test
- public void testReadWriteContainerHistory() throws Exception {
- // Out of order
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
- try {
- writeContainerFinishData(containerId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains(
- "is stored before the start information"));
- }
- // Normal
- writeApplicationAttemptStartData(appAttemptId);
- int numContainers = 5;
- for (int i = 1; i <= numContainers; ++i) {
- containerId = ContainerId.newContainerId(appAttemptId, i);
- writeContainerStartData(containerId);
- writeContainerFinishData(containerId);
- }
- Assert
- .assertEquals(numContainers, store.getContainers(appAttemptId).size());
- for (int i = 1; i <= numContainers; ++i) {
- containerId = ContainerId.newContainerId(appAttemptId, i);
- ContainerHistoryData data = store.getContainer(containerId);
- Assert.assertNotNull(data);
- Assert.assertEquals(Priority.newInstance(containerId.getId()),
- data.getPriority());
- Assert.assertEquals(containerId.toString(), data.getDiagnosticsInfo());
- }
- ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
- Assert.assertNotNull(masterContainer);
- Assert.assertEquals(ContainerId.newContainerId(appAttemptId, 1),
- masterContainer.getContainerId());
- writeApplicationAttemptFinishData(appAttemptId);
- // Write again
- containerId = ContainerId.newContainerId(appAttemptId, 1);
- try {
- writeContainerStartData(containerId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is already stored"));
- }
- try {
- writeContainerFinishData(containerId);
- Assert.fail();
- } catch (IOException e) {
- Assert.assertTrue(e.getMessage().contains("is already stored"));
- }
- }
-
- @Test
- @Ignore
- public void testMassiveWriteContainerHistory() throws IOException {
- long mb = 1024 * 1024;
- Runtime runtime = Runtime.getRuntime();
- long usedMemoryBefore = (runtime.totalMemory() - runtime.freeMemory()) / mb;
- int numContainers = 100000;
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- for (int i = 1; i <= numContainers; ++i) {
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, i);
- writeContainerStartData(containerId);
- writeContainerFinishData(containerId);
- }
- long usedMemoryAfter = (runtime.totalMemory() - runtime.freeMemory()) / mb;
- Assert.assertTrue((usedMemoryAfter - usedMemoryBefore) < 200);
- }
-
-}
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
index 741bb3c..c4cebd6 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.OUT_OFF_BAND_DATA_TIME_ALLOWANCE;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_RECORD_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.UPSERT_METRICS_SQL;
+import static org.apache.phoenix.end2end.ParallelStatsDisabledIT.tearDownMiniCluster;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.assertj.core.api.Assertions.assertThat;
-import static org.powermock.api.easymock.PowerMock.mockStatic;
import java.io.IOException;
import java.sql.Connection;
@@ -40,8 +40,8 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.util.RetryCounterFactory;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.AggregatorUtils;
@@ -204,7 +204,7 @@ public abstract class AbstractMiniHBaseClusterTest extends BaseTest {
new PhoenixHBaseAccessor(new TimelineMetricConfiguration(new Configuration(), metricsConf),
new PhoenixConnectionProvider() {
@Override
- public HBaseAdmin getHBaseAdmin() throws IOException {
+ public Admin getHBaseAdmin() throws IOException {
try {
return driver.getConnectionQueryServices(null, null).getAdmin();
} catch (SQLException e) {
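
The hunk above is the HBase 2.x client migration point: PhoenixConnectionProvider.getHBaseAdmin() now returns the Admin interface instead of the concrete HBaseAdmin class. Below is a minimal sketch of a provider built directly on ConnectionFactory, assuming only the stock HBase 2.x client API; it is an illustration, not the project's DefaultPhoenixDataSource.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AdminProviderSketch {
  private final Configuration hbaseConf;

  public AdminProviderSketch(Configuration hbaseConf) {
    this.hbaseConf = hbaseConf;
  }

  // Callers program against the Admin interface; the concrete class
  // behind it is an HBase client implementation detail. A production
  // provider would cache and eventually close the Connection.
  public Admin getHBaseAdmin() throws IOException {
    Connection connection = ConnectionFactory.createConnection(hbaseConf);
    return connection.getAdmin();
  }
}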
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
index 57f9796..2a5dd0b 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/ITPhoenixHBaseAccessor.java
@@ -30,6 +30,7 @@ import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.METRICS_RECORD_TABLE_NAME;
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.PHOENIX_TABLES;
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.query.PhoenixTransactSQL.PHOENIX_TABLES_REGEX_PATTERN;
import java.io.IOException;
import java.lang.reflect.Field;
@@ -43,12 +44,14 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.metrics2.sink.timeline.ContainerMetric;
import org.apache.hadoop.metrics2.sink.timeline.MetricClusterAggregate;
import org.apache.hadoop.metrics2.sink.timeline.MetricHostAggregate;
@@ -324,26 +327,13 @@ public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
@Test
public void testInitPoliciesAndTTL() throws Exception {
- HBaseAdmin hBaseAdmin = hdb.getHBaseAdmin();
- String precisionTtl = "";
- // Verify policies are unset
- for (String tableName : PHOENIX_TABLES) {
- HTableDescriptor tableDescriptor = hBaseAdmin.getTableDescriptor(tableName.getBytes());
- tableDescriptor.setNormalizationEnabled(true);
- Assert.assertTrue("Normalizer enabled.", tableDescriptor.isNormalizationEnabled());
-
- for (HColumnDescriptor family : tableDescriptor.getColumnFamilies()) {
- if (tableName.equals(METRICS_RECORD_TABLE_NAME)) {
- precisionTtl = family.getValue("TTL");
- }
- }
- Assert.assertEquals("Precision TTL value.", "86400", precisionTtl);
- }
+ Admin hBaseAdmin = hdb.getHBaseAdmin();
+ int precisionTtl = 2 * 86400;
Field f = PhoenixHBaseAccessor.class.getDeclaredField("tableTTL");
f.setAccessible(true);
- Map<String, String> precisionValues = (Map<String, String>) f.get(hdb);
- precisionValues.put(METRICS_RECORD_TABLE_NAME, String.valueOf(2 * 86400));
+ Map<String, Integer> precisionValues = (Map<String, Integer>) f.get(hdb);
+ precisionValues.put(METRICS_RECORD_TABLE_NAME, precisionTtl);
f.set(hdb, precisionValues);
Field f2 = PhoenixHBaseAccessor.class.getDeclaredField("timelineMetricsTablesDurability");
@@ -360,13 +350,18 @@ public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
for (int i = 0; i < 10; i++) {
LOG.warn("Policy check retry : " + i);
for (String tableName : PHOENIX_TABLES) {
- HTableDescriptor tableDescriptor = hBaseAdmin.getTableDescriptor(tableName.getBytes());
+ TableName[] tableNames = hBaseAdmin.listTableNames(PHOENIX_TABLES_REGEX_PATTERN, false);
+ Optional<TableName> tableNameOptional = Arrays.stream(tableNames)
+ .filter(t -> tableName.equals(t.getNameAsString())).findFirst();
+
+ TableDescriptor tableDescriptor = hBaseAdmin.getTableDescriptor(tableNameOptional.get());
+
normalizerEnabled = tableDescriptor.isNormalizationEnabled();
tableDurabilitySet = (Durability.ASYNC_WAL.equals(tableDescriptor.getDurability()));
if (tableName.equals(METRICS_RECORD_TABLE_NAME)) {
- precisionTableCompactionPolicy = tableDescriptor.getConfigurationValue(HSTORE_ENGINE_CLASS);
+ precisionTableCompactionPolicy = tableDescriptor.getValue(HSTORE_ENGINE_CLASS);
} else {
- aggregateTableCompactionPolicy = tableDescriptor.getConfigurationValue(HSTORE_COMPACTION_CLASS_KEY);
+ aggregateTableCompactionPolicy = tableDescriptor.getValue(HSTORE_COMPACTION_CLASS_KEY);
}
LOG.debug("Table: " + tableName + ", normalizerEnabled = " + normalizerEnabled);
// Best effort for 20 seconds
@@ -374,8 +369,8 @@ public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
Thread.sleep(20000l);
}
if (tableName.equals(METRICS_RECORD_TABLE_NAME)) {
- for (HColumnDescriptor family : tableDescriptor.getColumnFamilies()) {
- precisionTtl = family.getValue("TTL");
+ for (ColumnFamilyDescriptor family : tableDescriptor.getColumnFamilies()) {
+ precisionTtl = family.getTimeToLive();
}
}
}
@@ -385,7 +380,7 @@ public class ITPhoenixHBaseAccessor extends AbstractMiniHBaseClusterTest {
Assert.assertTrue("Durability Set.", tableDurabilitySet);
Assert.assertEquals("FIFO compaction policy is set for METRIC_RECORD.", FIFO_COMPACTION_POLICY_CLASS, precisionTableCompactionPolicy);
Assert.assertEquals("FIFO compaction policy is set for aggregate tables", DATE_TIERED_COMPACTION_POLICY, aggregateTableCompactionPolicy);
- Assert.assertEquals("Precision TTL value not changed.", String.valueOf(2 * 86400), precisionTtl);
+ Assert.assertEquals("Precision TTL value as expected.", 2 * 86400, precisionTtl);
hBaseAdmin.close();
}
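
The rewritten test above replaces HTableDescriptor/HColumnDescriptor lookups with the HBase 2.x TableDescriptor/ColumnFamilyDescriptor API: table names are resolved through Admin.listTableNames(regex, false), and the family TTL is read as an int via getTimeToLive() rather than as the raw "TTL" string value. A small sketch of that read path under the same API, assuming an already-open Admin and a table-name pattern like the diff's PHOENIX_TABLES_REGEX_PATTERN:

import java.io.IOException;
import java.util.regex.Pattern;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;

public final class TableTtlSketch {
  // Prints durability, the normalizer flag and the per-family TTL in
  // seconds for every table whose name matches the given pattern.
  static void describe(Admin admin, Pattern tablePattern) throws IOException {
    for (TableName name : admin.listTableNames(tablePattern, false)) {
      TableDescriptor table = admin.getTableDescriptor(name);
      System.out.println(name.getNameAsString()
          + " durability=" + table.getDurability()
          + " normalizer=" + table.isNormalizationEnabled());
      for (ColumnFamilyDescriptor family : table.getColumnFamilies()) {
        System.out.println("  " + family.getNameAsString()
            + " TTL=" + family.getTimeToLive());
      }
    }
  }
}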
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
deleted file mode 100644
index 9b27309..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileContext;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.iq80.leveldb.DBIterator;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong;
-import static org.junit.Assert.assertEquals;
-
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class TestLeveldbTimelineStore extends TimelineStoreTestUtils {
- private FileContext fsContext;
- private File fsPath;
-
- @Before
- public void setup() throws Exception {
- fsContext = FileContext.getLocalFSFileContext();
- Configuration conf = new Configuration();
- fsPath = new File("target", this.getClass().getSimpleName() +
- "-tmpDir").getAbsoluteFile();
- fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
- conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH,
- fsPath.getAbsolutePath());
- conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, false);
- store = new LeveldbTimelineStore();
- store.init(conf);
- store.start();
- loadTestData();
- loadVerificationData();
- }
-
- @After
- public void tearDown() throws Exception {
- store.stop();
- fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
- }
-
- @Test
- public void testGetSingleEntity() throws IOException {
- super.testGetSingleEntity();
- ((LeveldbTimelineStore)store).clearStartTimeCache();
- super.testGetSingleEntity();
- loadTestData();
- }
-
- @Test
- public void testGetEntities() throws IOException {
- super.testGetEntities();
- }
-
- @Test
- public void testGetEntitiesWithFromId() throws IOException {
- super.testGetEntitiesWithFromId();
- }
-
- @Test
- public void testGetEntitiesWithFromTs() throws IOException {
- super.testGetEntitiesWithFromTs();
- }
-
- @Test
- public void testGetEntitiesWithPrimaryFilters() throws IOException {
- super.testGetEntitiesWithPrimaryFilters();
- }
-
- @Test
- public void testGetEntitiesWithSecondaryFilters() throws IOException {
- super.testGetEntitiesWithSecondaryFilters();
- }
-
- @Test
- public void testGetEvents() throws IOException {
- super.testGetEvents();
- }
-
- @Test
- public void testCacheSizes() {
- Configuration conf = new Configuration();
- assertEquals(10000, LeveldbTimelineStore.getStartTimeReadCacheSize(conf));
- assertEquals(10000, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf));
- conf.setInt(
- YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE,
- 10001);
- assertEquals(10001, LeveldbTimelineStore.getStartTimeReadCacheSize(conf));
- conf = new Configuration();
- conf.setInt(
- YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE,
- 10002);
- assertEquals(10002, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf));
- }
-
- private boolean deleteNextEntity(String entityType, byte[] ts)
- throws IOException, InterruptedException {
- DBIterator iterator = null;
- DBIterator pfIterator = null;
- try {
- iterator = ((LeveldbTimelineStore)store).getDbIterator(false);
- pfIterator = ((LeveldbTimelineStore)store).getDbIterator(false);
- return ((LeveldbTimelineStore)store).deleteNextEntity(entityType, ts,
- iterator, pfIterator, false);
- } finally {
- IOUtils.cleanup(null, iterator, pfIterator);
- }
- }
-
- @Test
- public void testGetEntityTypes() throws IOException {
- List<String> entityTypes = ((LeveldbTimelineStore)store).getEntityTypes();
- assertEquals(4, entityTypes.size());
- assertEquals(entityType1, entityTypes.get(0));
- assertEquals(entityType2, entityTypes.get(1));
- assertEquals(entityType4, entityTypes.get(2));
- assertEquals(entityType5, entityTypes.get(3));
- }
-
- @Test
- public void testDeleteEntities() throws IOException, InterruptedException {
- assertEquals(2, getEntities("type_1").size());
- assertEquals(1, getEntities("type_2").size());
-
- assertEquals(false, deleteNextEntity(entityType1,
- writeReverseOrderedLong(122l)));
- assertEquals(2, getEntities("type_1").size());
- assertEquals(1, getEntities("type_2").size());
-
- assertEquals(true, deleteNextEntity(entityType1,
- writeReverseOrderedLong(123l)));
- List<TimelineEntity> entities = getEntities("type_2");
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId2, entityType2, events2, Collections.singletonMap(
- entityType1, Collections.singleton(entityId1b)), EMPTY_PRIMARY_FILTERS,
- EMPTY_MAP, entities.get(0));
- entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- ((LeveldbTimelineStore)store).discardOldEntities(-123l);
- assertEquals(1, getEntities("type_1").size());
- assertEquals(0, getEntities("type_2").size());
- assertEquals(3, ((LeveldbTimelineStore)store).getEntityTypes().size());
-
- ((LeveldbTimelineStore)store).discardOldEntities(123l);
- assertEquals(0, getEntities("type_1").size());
- assertEquals(0, getEntities("type_2").size());
- assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());
- assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
- }
-
- @Test
- public void testDeleteEntitiesPrimaryFilters()
- throws IOException, InterruptedException {
- Map<String, Set<Object>> primaryFilter =
- Collections.singletonMap("user", Collections.singleton(
- (Object) "otheruser"));
- TimelineEntities atsEntities = new TimelineEntities();
- atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
- entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter,
- null)));
- TimelinePutResponse response = store.put(atsEntities);
- assertEquals(0, response.getErrors().size());
-
- NameValuePair pfPair = new NameValuePair("user", "otheruser");
- List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
- pfPair);
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
- EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0));
-
- entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- ((LeveldbTimelineStore)store).discardOldEntities(-123l);
- assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
- assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
-
- ((LeveldbTimelineStore)store).discardOldEntities(123l);
- assertEquals(0, getEntities("type_1").size());
- assertEquals(0, getEntities("type_2").size());
- assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());
-
- assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
- assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
- }
-
- @Test
- public void testFromTsWithDeletion()
- throws IOException, InterruptedException {
- long l = System.currentTimeMillis();
- assertEquals(2, getEntitiesFromTs("type_1", l).size());
- assertEquals(1, getEntitiesFromTs("type_2", l).size());
- assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
- l).size());
- ((LeveldbTimelineStore)store).discardOldEntities(123l);
- assertEquals(0, getEntitiesFromTs("type_1", l).size());
- assertEquals(0, getEntitiesFromTs("type_2", l).size());
- assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
- l).size());
- assertEquals(0, getEntities("type_1").size());
- assertEquals(0, getEntities("type_2").size());
- assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
- l).size());
- loadTestData();
- assertEquals(0, getEntitiesFromTs("type_1", l).size());
- assertEquals(0, getEntitiesFromTs("type_2", l).size());
- assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
- l).size());
- assertEquals(2, getEntities("type_1").size());
- assertEquals(1, getEntities("type_2").size());
- assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
- }
-
-}
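
[Editor's note, not part of the commit] The deletion assertions above depend on timestamps being stored as reverse-ordered byte keys, so the newest entities sort first in LevelDB's byte-ordered keyspace. A minimal, self-contained sketch of that encoding for reference; the class name ReverseOrderedLong is hypothetical, the removed tests used an equivalent helper:

    public final class ReverseOrderedLong {
      // Encode a long so that unsigned byte-wise ascending order corresponds
      // to descending numeric order -- larger timestamps sort first.
      public static byte[] encode(long value) {
        long bits = ~(value ^ Long.MIN_VALUE); // flip sign bit, then complement
        byte[] out = new byte[8];
        for (int i = 7; i >= 0; i--) {         // big-endian byte layout
          out[i] = (byte) (bits & 0xFF);
          bits >>>= 8;
        }
        return out;
      }
    }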
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
deleted file mode 100644
index 415de53..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-
-public class TestMemoryTimelineStore extends TimelineStoreTestUtils {
-
- @Before
- public void setup() throws Exception {
- store = new MemoryTimelineStore();
- store.init(new YarnConfiguration());
- store.start();
- loadTestData();
- loadVerificationData();
- }
-
- @After
- public void tearDown() throws Exception {
- store.stop();
- }
-
- public TimelineStore getTimelineStore() {
- return store;
- }
-
- @Test
- public void testGetSingleEntity() throws IOException {
- super.testGetSingleEntity();
- }
-
- @Test
- public void testGetEntities() throws IOException {
- super.testGetEntities();
- }
-
- @Test
- public void testGetEntitiesWithFromId() throws IOException {
- super.testGetEntitiesWithFromId();
- }
-
- @Test
- public void testGetEntitiesWithFromTs() throws IOException {
- super.testGetEntitiesWithFromTs();
- }
-
- @Test
- public void testGetEntitiesWithPrimaryFilters() throws IOException {
- super.testGetEntitiesWithPrimaryFilters();
- }
-
- @Test
- public void testGetEntitiesWithSecondaryFilters() throws IOException {
- super.testGetEntitiesWithSecondaryFilters();
- }
-
- @Test
- public void testGetEvents() throws IOException {
- super.testGetEvents();
- }
-
-}
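
[Editor's note, not part of the commit] The in-memory store test deleted above reduced to one service lifecycle; a condensed sketch, assuming the (now removed) MemoryTimelineStore class, in case a replacement store test is ever written with the same shape:

    TimelineStore store = new MemoryTimelineStore(); // class removed by this commit
    store.init(new YarnConfiguration());
    store.start();
    try {
      TimelineEntities entities = new TimelineEntities();
      TimelinePutResponse response = store.put(entities); // valid input yields no errors
      assert response.getErrors().isEmpty();
    } finally {
      store.stop();
    }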
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
deleted file mode 100644
index d760536..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
+++ /dev/null
@@ -1,789 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
-
-public class TimelineStoreTestUtils {
-
- protected static final List<TimelineEvent> EMPTY_EVENTS =
- Collections.emptyList();
- protected static final Map<String, Object> EMPTY_MAP =
- Collections.emptyMap();
- protected static final Map<String, Set<Object>> EMPTY_PRIMARY_FILTERS =
- Collections.emptyMap();
- protected static final Map<String, Set<String>> EMPTY_REL_ENTITIES =
- Collections.emptyMap();
-
- protected TimelineStore store;
- protected String entityId1;
- protected String entityType1;
- protected String entityId1b;
- protected String entityId2;
- protected String entityType2;
- protected String entityId4;
- protected String entityType4;
- protected String entityId5;
- protected String entityType5;
- protected Map<String, Set<Object>> primaryFilters;
- protected Map<String, Object> secondaryFilters;
- protected Map<String, Object> allFilters;
- protected Map<String, Object> otherInfo;
- protected Map<String, Set<String>> relEntityMap;
- protected Map<String, Set<String>> relEntityMap2;
- protected NameValuePair userFilter;
- protected NameValuePair numericFilter1;
- protected NameValuePair numericFilter2;
- protected NameValuePair numericFilter3;
- protected Collection<NameValuePair> goodTestingFilters;
- protected Collection<NameValuePair> badTestingFilters;
- protected TimelineEvent ev1;
- protected TimelineEvent ev2;
- protected TimelineEvent ev3;
- protected TimelineEvent ev4;
- protected Map<String, Object> eventInfo;
- protected List<TimelineEvent> events1;
- protected List<TimelineEvent> events2;
- protected long beforeTs;
-
- /**
- * Load test data into the given store
- */
- protected void loadTestData() throws IOException {
- beforeTs = System.currentTimeMillis()-1;
- TimelineEntities entities = new TimelineEntities();
- Map<String, Set<Object>> primaryFilters =
- new HashMap<String, Set<Object>>();
- Set<Object> l1 = new HashSet<Object>();
- l1.add("username");
- Set<Object> l2 = new HashSet<Object>();
- l2.add((long)Integer.MAX_VALUE);
- Set<Object> l3 = new HashSet<Object>();
- l3.add("123abc");
- Set<Object> l4 = new HashSet<Object>();
- l4.add((long)Integer.MAX_VALUE + 1l);
- primaryFilters.put("user", l1);
- primaryFilters.put("appname", l2);
- primaryFilters.put("other", l3);
- primaryFilters.put("long", l4);
- Map<String, Object> secondaryFilters = new HashMap<String, Object>();
- secondaryFilters.put("startTime", 123456l);
- secondaryFilters.put("status", "RUNNING");
- Map<String, Object> otherInfo1 = new HashMap<String, Object>();
- otherInfo1.put("info1", "val1");
- otherInfo1.putAll(secondaryFilters);
-
- String entityId1 = "id_1";
- String entityType1 = "type_1";
- String entityId1b = "id_2";
- String entityId2 = "id_2";
- String entityType2 = "type_2";
- String entityId4 = "id_4";
- String entityType4 = "type_4";
- String entityId5 = "id_5";
- String entityType5 = "type_5";
-
- Map<String, Set<String>> relatedEntities =
- new HashMap<String, Set<String>>();
- relatedEntities.put(entityType2, Collections.singleton(entityId2));
-
- TimelineEvent ev3 = createEvent(789l, "launch_event", null);
- TimelineEvent ev4 = createEvent(-123l, "init_event", null);
- List<TimelineEvent> events = new ArrayList<TimelineEvent>();
- events.add(ev3);
- events.add(ev4);
- entities.setEntities(Collections.singletonList(createEntity(entityId2,
- entityType2, null, events, null, null, null)));
- TimelinePutResponse response = store.put(entities);
- assertEquals(0, response.getErrors().size());
-
- TimelineEvent ev1 = createEvent(123l, "start_event", null);
- entities.setEntities(Collections.singletonList(createEntity(entityId1,
- entityType1, 123l, Collections.singletonList(ev1),
- relatedEntities, primaryFilters, otherInfo1)));
- response = store.put(entities);
- assertEquals(0, response.getErrors().size());
- entities.setEntities(Collections.singletonList(createEntity(entityId1b,
- entityType1, null, Collections.singletonList(ev1), relatedEntities,
- primaryFilters, otherInfo1)));
- response = store.put(entities);
- assertEquals(0, response.getErrors().size());
-
- Map<String, Object> eventInfo = new HashMap<String, Object>();
- eventInfo.put("event info 1", "val1");
- TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo);
- Map<String, Object> otherInfo2 = new HashMap<String, Object>();
- otherInfo2.put("info2", "val2");
- entities.setEntities(Collections.singletonList(createEntity(entityId1,
- entityType1, null, Collections.singletonList(ev2), null,
- primaryFilters, otherInfo2)));
- response = store.put(entities);
- assertEquals(0, response.getErrors().size());
- entities.setEntities(Collections.singletonList(createEntity(entityId1b,
- entityType1, 789l, Collections.singletonList(ev2), null,
- primaryFilters, otherInfo2)));
- response = store.put(entities);
- assertEquals(0, response.getErrors().size());
-
- entities.setEntities(Collections.singletonList(createEntity(
- "badentityid", "badentity", null, null, null, null, otherInfo1)));
- response = store.put(entities);
- assertEquals(1, response.getErrors().size());
- TimelinePutError error = response.getErrors().get(0);
- assertEquals("badentityid", error.getEntityId());
- assertEquals("badentity", error.getEntityType());
- assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode());
-
- relatedEntities.clear();
- relatedEntities.put(entityType5, Collections.singleton(entityId5));
- entities.setEntities(Collections.singletonList(createEntity(entityId4,
- entityType4, 42l, null, relatedEntities, null, null)));
- response = store.put(entities);
- assertEquals(0, response.getErrors().size());
- }
-
- /**
- * Load verification data
- */
- protected void loadVerificationData() throws Exception {
- userFilter = new NameValuePair("user", "username");
- numericFilter1 = new NameValuePair("appname", Integer.MAX_VALUE);
- numericFilter2 = new NameValuePair("long", (long)Integer.MAX_VALUE + 1l);
- numericFilter3 = new NameValuePair("other", "123abc");
- goodTestingFilters = new ArrayList<NameValuePair>();
- goodTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
- goodTestingFilters.add(new NameValuePair("status", "RUNNING"));
- badTestingFilters = new ArrayList<NameValuePair>();
- badTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
- badTestingFilters.add(new NameValuePair("status", "FINISHED"));
-
- primaryFilters = new HashMap<String, Set<Object>>();
- Set<Object> l1 = new HashSet<Object>();
- l1.add("username");
- Set<Object> l2 = new HashSet<Object>();
- l2.add(Integer.MAX_VALUE);
- Set<Object> l3 = new HashSet<Object>();
- l3.add("123abc");
- Set<Object> l4 = new HashSet<Object>();
- l4.add((long)Integer.MAX_VALUE + 1l);
- primaryFilters.put("user", l1);
- primaryFilters.put("appname", l2);
- primaryFilters.put("other", l3);
- primaryFilters.put("long", l4);
- secondaryFilters = new HashMap<String, Object>();
- secondaryFilters.put("startTime", 123456);
- secondaryFilters.put("status", "RUNNING");
- allFilters = new HashMap<String, Object>();
- allFilters.putAll(secondaryFilters);
- for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) {
- for (Object o : pf.getValue()) {
- allFilters.put(pf.getKey(), o);
- }
- }
- otherInfo = new HashMap<String, Object>();
- otherInfo.put("info1", "val1");
- otherInfo.put("info2", "val2");
- otherInfo.putAll(secondaryFilters);
-
- entityId1 = "id_1";
- entityType1 = "type_1";
- entityId1b = "id_2";
- entityId2 = "id_2";
- entityType2 = "type_2";
- entityId4 = "id_4";
- entityType4 = "type_4";
- entityId5 = "id_5";
- entityType5 = "type_5";
-
- ev1 = createEvent(123l, "start_event", null);
-
- eventInfo = new HashMap<String, Object>();
- eventInfo.put("event info 1", "val1");
- ev2 = createEvent(456l, "end_event", eventInfo);
- events1 = new ArrayList<TimelineEvent>();
- events1.add(ev2);
- events1.add(ev1);
-
- relEntityMap =
- new HashMap<String, Set<String>>();
- Set<String> ids = new HashSet<String>();
- ids.add(entityId1);
- ids.add(entityId1b);
- relEntityMap.put(entityType1, ids);
-
- relEntityMap2 =
- new HashMap<String, Set<String>>();
- relEntityMap2.put(entityType4, Collections.singleton(entityId4));
-
- ev3 = createEvent(789l, "launch_event", null);
- ev4 = createEvent(-123l, "init_event", null);
- events2 = new ArrayList<TimelineEvent>();
- events2.add(ev3);
- events2.add(ev4);
- }
-
- public void testGetSingleEntity() throws IOException {
- // test getting entity info
- verifyEntityInfo(null, null, null, null, null, null,
- store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class)));
-
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, 123l, store.getEntity(entityId1,
- entityType1, EnumSet.allOf(Field.class)));
-
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, 123l, store.getEntity(entityId1b,
- entityType1, EnumSet.allOf(Field.class)));
-
- verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
- EMPTY_PRIMARY_FILTERS, EMPTY_MAP, -123l, store.getEntity(entityId2,
- entityType2, EnumSet.allOf(Field.class)));
-
- verifyEntityInfo(entityId4, entityType4, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
- EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId4,
- entityType4, EnumSet.allOf(Field.class)));
-
- verifyEntityInfo(entityId5, entityType5, EMPTY_EVENTS, relEntityMap2,
- EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId5,
- entityType5, EnumSet.allOf(Field.class)));
-
- // test getting single fields
- verifyEntityInfo(entityId1, entityType1, events1, null, null, null,
- store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS)));
-
- verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2),
- null, null, null, store.getEntity(entityId1, entityType1,
- EnumSet.of(Field.LAST_EVENT_ONLY)));
-
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1,
- null));
-
- verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null,
- store.getEntity(entityId1, entityType1,
- EnumSet.of(Field.PRIMARY_FILTERS)));
-
- verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo,
- store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO)));
-
- verifyEntityInfo(entityId2, entityType2, null, relEntityMap, null, null,
- store.getEntity(entityId2, entityType2,
- EnumSet.of(Field.RELATED_ENTITIES)));
- }
-
- protected List<TimelineEntity> getEntities(String entityType)
- throws IOException {
- return store.getEntities(entityType, null, null, null, null, null,
- null, null, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntitiesWithPrimaryFilter(
- String entityType, NameValuePair primaryFilter) throws IOException {
- return store.getEntities(entityType, null, null, null, null, null,
- primaryFilter, null, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntitiesFromId(String entityType,
- String fromId) throws IOException {
- return store.getEntities(entityType, null, null, null, fromId, null,
- null, null, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntitiesFromTs(String entityType,
- long fromTs) throws IOException {
- return store.getEntities(entityType, null, null, null, null, fromTs,
- null, null, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilter(
- String entityType, NameValuePair primaryFilter, String fromId)
- throws IOException {
- return store.getEntities(entityType, null, null, null, fromId, null,
- primaryFilter, null, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntitiesFromTsWithPrimaryFilter(
- String entityType, NameValuePair primaryFilter, long fromTs)
- throws IOException {
- return store.getEntities(entityType, null, null, null, null, fromTs,
- primaryFilter, null, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntitiesFromIdWithWindow(String entityType,
- Long windowEnd, String fromId) throws IOException {
- return store.getEntities(entityType, null, null, windowEnd, fromId, null,
- null, null, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilterAndWindow(
- String entityType, Long windowEnd, String fromId,
- NameValuePair primaryFilter) throws IOException {
- return store.getEntities(entityType, null, null, windowEnd, fromId, null,
- primaryFilter, null, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntitiesWithFilters(String entityType,
- NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters)
- throws IOException {
- return store.getEntities(entityType, null, null, null, null, null,
- primaryFilter, secondaryFilters, null).getEntities();
- }
-
- protected List<TimelineEntity> getEntities(String entityType, Long limit,
- Long windowStart, Long windowEnd, NameValuePair primaryFilter,
- EnumSet<Field> fields) throws IOException {
- return store.getEntities(entityType, limit, windowStart, windowEnd, null,
- null, primaryFilter, null, fields).getEntities();
- }
-
- public void testGetEntities() throws IOException {
- // test getting entities
- assertEquals("nonzero entities size for nonexistent type", 0,
- getEntities("type_0").size());
- assertEquals("nonzero entities size for nonexistent type", 0,
- getEntities("type_3").size());
- assertEquals("nonzero entities size for nonexistent type", 0,
- getEntities("type_6").size());
- assertEquals("nonzero entities size for nonexistent type", 0,
- getEntitiesWithPrimaryFilter("type_0", userFilter).size());
- assertEquals("nonzero entities size for nonexistent type", 0,
- getEntitiesWithPrimaryFilter("type_3", userFilter).size());
- assertEquals("nonzero entities size for nonexistent type", 0,
- getEntitiesWithPrimaryFilter("type_6", userFilter).size());
-
- List<TimelineEntity> entities = getEntities("type_1");
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntities("type_2");
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
- EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
-
- entities = getEntities("type_1", 1l, null, null, null,
- EnumSet.allOf(Field.class));
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = getEntities("type_1", 1l, 0l, null, null,
- EnumSet.allOf(Field.class));
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = getEntities("type_1", null, 234l, null, null,
- EnumSet.allOf(Field.class));
- assertEquals(0, entities.size());
-
- entities = getEntities("type_1", null, 123l, null, null,
- EnumSet.allOf(Field.class));
- assertEquals(0, entities.size());
-
- entities = getEntities("type_1", null, 234l, 345l, null,
- EnumSet.allOf(Field.class));
- assertEquals(0, entities.size());
-
- entities = getEntities("type_1", null, null, 345l, null,
- EnumSet.allOf(Field.class));
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntities("type_1", null, null, 123l, null,
- EnumSet.allOf(Field.class));
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
- }
-
- public void testGetEntitiesWithFromId() throws IOException {
- List<TimelineEntity> entities = getEntitiesFromId("type_1", entityId1);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntitiesFromId("type_1", entityId1b);
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = getEntitiesFromIdWithWindow("type_1", 0l, entityId1);
- assertEquals(0, entities.size());
-
- entities = getEntitiesFromId("type_2", "a");
- assertEquals(0, entities.size());
-
- entities = getEntitiesFromId("type_2", entityId2);
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
- EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
-
- entities = getEntitiesFromIdWithWindow("type_2", -456l, null);
- assertEquals(0, entities.size());
-
- entities = getEntitiesFromIdWithWindow("type_2", -456l, "a");
- assertEquals(0, entities.size());
-
- entities = getEntitiesFromIdWithWindow("type_2", 0l, null);
- assertEquals(1, entities.size());
-
- entities = getEntitiesFromIdWithWindow("type_2", 0l, entityId2);
- assertEquals(1, entities.size());
-
- // same tests with primary filters
- entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter,
- entityId1);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter,
- entityId1b);
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = getEntitiesFromIdWithPrimaryFilterAndWindow("type_1", 0l,
- entityId1, userFilter);
- assertEquals(0, entities.size());
-
- entities = getEntitiesFromIdWithPrimaryFilter("type_2", userFilter, "a");
- assertEquals(0, entities.size());
- }
-
- public void testGetEntitiesWithFromTs() throws IOException {
- assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size());
- assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size());
- assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
- beforeTs).size());
- long afterTs = System.currentTimeMillis();
- assertEquals(2, getEntitiesFromTs("type_1", afterTs).size());
- assertEquals(1, getEntitiesFromTs("type_2", afterTs).size());
- assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
- afterTs).size());
- assertEquals(2, getEntities("type_1").size());
- assertEquals(1, getEntities("type_2").size());
- assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
- // check insert time is not overwritten
- long beforeTs = this.beforeTs;
- loadTestData();
- assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size());
- assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size());
- assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
- beforeTs).size());
- assertEquals(2, getEntitiesFromTs("type_1", afterTs).size());
- assertEquals(1, getEntitiesFromTs("type_2", afterTs).size());
- assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
- afterTs).size());
- }
-
- public void testGetEntitiesWithPrimaryFilters() throws IOException {
- // test using primary filter
- assertEquals("nonzero entities size for primary filter", 0,
- getEntitiesWithPrimaryFilter("type_1",
- new NameValuePair("none", "none")).size());
- assertEquals("nonzero entities size for primary filter", 0,
- getEntitiesWithPrimaryFilter("type_2",
- new NameValuePair("none", "none")).size());
- assertEquals("nonzero entities size for primary filter", 0,
- getEntitiesWithPrimaryFilter("type_3",
- new NameValuePair("none", "none")).size());
-
- List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
- userFilter);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntitiesWithPrimaryFilter("type_1", numericFilter1);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntitiesWithPrimaryFilter("type_1", numericFilter2);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntitiesWithPrimaryFilter("type_1", numericFilter3);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntitiesWithPrimaryFilter("type_2", userFilter);
- assertEquals(0, entities.size());
-
- entities = getEntities("type_1", 1l, null, null, userFilter, null);
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = getEntities("type_1", 1l, 0l, null, userFilter, null);
- assertEquals(1, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
-
- entities = getEntities("type_1", null, 234l, null, userFilter, null);
- assertEquals(0, entities.size());
-
- entities = getEntities("type_1", null, 234l, 345l, userFilter, null);
- assertEquals(0, entities.size());
-
- entities = getEntities("type_1", null, null, 345l, userFilter, null);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
- }
-
- public void testGetEntitiesWithSecondaryFilters() throws IOException {
- // test using secondary filter
- List<TimelineEntity> entities = getEntitiesWithFilters("type_1", null,
- goodTestingFilters);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntitiesWithFilters("type_1", userFilter, goodTestingFilters);
- assertEquals(2, entities.size());
- verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(0));
- verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
- primaryFilters, otherInfo, entities.get(1));
-
- entities = getEntitiesWithFilters("type_1", null,
- Collections.singleton(new NameValuePair("user", "none")));
- assertEquals(0, entities.size());
-
- entities = getEntitiesWithFilters("type_1", null, badTestingFilters);
- assertEquals(0, entities.size());
-
- entities = getEntitiesWithFilters("type_1", userFilter, badTestingFilters);
- assertEquals(0, entities.size());
- }
-
- public void testGetEvents() throws IOException {
- // test getting entity timelines
- SortedSet<String> sortedSet = new TreeSet<String>();
- sortedSet.add(entityId1);
- List<EventsOfOneEntity> timelines =
- store.getEntityTimelines(entityType1, sortedSet, null, null,
- null, null).getAllEvents();
- assertEquals(1, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
-
- sortedSet.add(entityId1b);
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- null, null, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
- verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2, ev1);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, 1l,
- null, null, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
- verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- 345l, null, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
- verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- 123l, null, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
- verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- null, 345l, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
- verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- null, 123l, null).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
- verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
-
- timelines = store.getEntityTimelines(entityType1, sortedSet, null,
- null, null, Collections.singleton("end_event")).getAllEvents();
- assertEquals(2, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
- verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
-
- sortedSet.add(entityId2);
- timelines = store.getEntityTimelines(entityType2, sortedSet, null,
- null, null, null).getAllEvents();
- assertEquals(1, timelines.size());
- verifyEntityTimeline(timelines.get(0), entityId2, entityType2, ev3, ev4);
- }
-
- /**
- * Verify a single entity and its start time
- */
- protected static void verifyEntityInfo(String entityId, String entityType,
- List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
- Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
- Long startTime, TimelineEntity retrievedEntityInfo) {
-
- verifyEntityInfo(entityId, entityType, events, relatedEntities,
- primaryFilters, otherInfo, retrievedEntityInfo);
- assertEquals(startTime, retrievedEntityInfo.getStartTime());
- }
-
- /**
- * Verify a single entity
- */
- protected static void verifyEntityInfo(String entityId, String entityType,
- List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
- Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
- TimelineEntity retrievedEntityInfo) {
- if (entityId == null) {
- assertNull(retrievedEntityInfo);
- return;
- }
- assertEquals(entityId, retrievedEntityInfo.getEntityId());
- assertEquals(entityType, retrievedEntityInfo.getEntityType());
- if (events == null) {
- assertNull(retrievedEntityInfo.getEvents());
- } else {
- assertEquals(events, retrievedEntityInfo.getEvents());
- }
- if (relatedEntities == null) {
- assertNull(retrievedEntityInfo.getRelatedEntities());
- } else {
- assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities());
- }
- if (primaryFilters == null) {
- assertNull(retrievedEntityInfo.getPrimaryFilters());
- } else {
- assertTrue(primaryFilters.equals(
- retrievedEntityInfo.getPrimaryFilters()));
- }
- if (otherInfo == null) {
- assertNull(retrievedEntityInfo.getOtherInfo());
- } else {
- assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo()));
- }
- }
-
- /**
- * Verify timeline events
- */
- private static void verifyEntityTimeline(
- EventsOfOneEntity retrievedEvents, String entityId, String entityType,
- TimelineEvent... actualEvents) {
- assertEquals(entityId, retrievedEvents.getEntityId());
- assertEquals(entityType, retrievedEvents.getEntityType());
- assertEquals(actualEvents.length, retrievedEvents.getEvents().size());
- for (int i = 0; i < actualEvents.length; i++) {
- assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i));
- }
- }
-
- /**
- * Create a test entity
- */
- protected static TimelineEntity createEntity(String entityId, String entityType,
- Long startTime, List<TimelineEvent> events,
- Map<String, Set<String>> relatedEntities,
- Map<String, Set<Object>> primaryFilters,
- Map<String, Object> otherInfo) {
- TimelineEntity entity = new TimelineEntity();
- entity.setEntityId(entityId);
- entity.setEntityType(entityType);
- entity.setStartTime(startTime);
- entity.setEvents(events);
- if (relatedEntities != null) {
- for (Entry<String, Set<String>> e : relatedEntities.entrySet()) {
- for (String v : e.getValue()) {
- entity.addRelatedEntity(e.getKey(), v);
- }
- }
- } else {
- entity.setRelatedEntities(null);
- }
- entity.setPrimaryFilters(primaryFilters);
- entity.setOtherInfo(otherInfo);
- return entity;
- }
-
- /**
- * Create a test event
- */
- private static TimelineEvent createEvent(long timestamp, String type, Map<String,
- Object> info) {
- TimelineEvent event = new TimelineEvent();
- event.setTimestamp(timestamp);
- event.setEventType(type);
- event.setEventInfo(info);
- return event;
- }
-
-}
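
[Editor's note, not part of the commit] The createEntity/createEvent helpers deleted above only wrapped the public TimelineEntity and TimelineEvent records, which survive this commit; the same construction pattern, condensed for reference:

    TimelineEvent start = new TimelineEvent();
    start.setTimestamp(System.currentTimeMillis());
    start.setEventType("start_event");

    TimelineEntity entity = new TimelineEntity();
    entity.setEntityId("id_1");
    entity.setEntityType("type_1");
    entity.setStartTime(start.getTimestamp());
    entity.setEvents(Collections.singletonList(start));
    entity.addPrimaryFilter("user", "username"); // primary filters are indexed for lookup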
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
deleted file mode 100644
index 605358f..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.apache.hadoop.yarn.webapp.Params.TITLE;
-import static org.mockito.Mockito.mock;
-
-import org.junit.Assert;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryClientService;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerImpl;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStoreTestUtils;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
-import org.apache.hadoop.yarn.util.StringHelper;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
-import org.apache.hadoop.yarn.webapp.test.WebAppTests;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.inject.Injector;
-
-public class TestAHSWebApp extends ApplicationHistoryStoreTestUtils {
-
- public void setApplicationHistoryStore(ApplicationHistoryStore store) {
- this.store = store;
- }
-
- @Before
- public void setup() {
- store = new MemoryApplicationHistoryStore();
- }
-
- @Test
- public void testAppControllerIndex() throws Exception {
- ApplicationHistoryManager ahManager = mock(ApplicationHistoryManager.class);
- Injector injector =
- WebAppTests.createMockInjector(ApplicationHistoryManager.class,
- ahManager);
- AHSController controller = injector.getInstance(AHSController.class);
- controller.index();
- Assert
- .assertEquals("Application History", controller.get(TITLE, "unknown"));
- }
-
- @Test
- public void testView() throws Exception {
- Injector injector =
- WebAppTests.createMockInjector(ApplicationBaseProtocol.class,
- mockApplicationHistoryClientService(5, 1, 1));
- AHSView ahsViewInstance = injector.getInstance(AHSView.class);
-
- ahsViewInstance.render();
- WebAppTests.flushOutput(injector);
-
- ahsViewInstance.set(YarnWebParams.APP_STATE,
- YarnApplicationState.FAILED.toString());
- ahsViewInstance.render();
- WebAppTests.flushOutput(injector);
-
- ahsViewInstance.set(YarnWebParams.APP_STATE, StringHelper.cjoin(
- YarnApplicationState.FAILED.toString(), YarnApplicationState.KILLED));
- ahsViewInstance.render();
- WebAppTests.flushOutput(injector);
- }
-
- @Test
- public void testAboutPage() throws Exception {
- Injector injector =
- WebAppTests.createMockInjector(ApplicationBaseProtocol.class,
- mockApplicationHistoryClientService(0, 0, 0));
- AboutPage aboutPageInstance = injector.getInstance(AboutPage.class);
-
- aboutPageInstance.render();
- WebAppTests.flushOutput(injector);
-
- aboutPageInstance.render();
- WebAppTests.flushOutput(injector);
- }
-
- @Test
- public void testAppPage() throws Exception {
- Injector injector =
- WebAppTests.createMockInjector(ApplicationBaseProtocol.class,
- mockApplicationHistoryClientService(1, 5, 1));
- AppPage appPageInstance = injector.getInstance(AppPage.class);
-
- appPageInstance.render();
- WebAppTests.flushOutput(injector);
-
- appPageInstance.set(YarnWebParams.APPLICATION_ID, ApplicationId
- .newInstance(0, 1).toString());
- appPageInstance.render();
- WebAppTests.flushOutput(injector);
- }
-
- @Test
- public void testAppAttemptPage() throws Exception {
- Injector injector =
- WebAppTests.createMockInjector(ApplicationBaseProtocol.class,
- mockApplicationHistoryClientService(1, 1, 5));
- AppAttemptPage appAttemptPageInstance =
- injector.getInstance(AppAttemptPage.class);
-
- appAttemptPageInstance.render();
- WebAppTests.flushOutput(injector);
-
- appAttemptPageInstance.set(YarnWebParams.APPLICATION_ATTEMPT_ID,
- ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1)
- .toString());
- appAttemptPageInstance.render();
- WebAppTests.flushOutput(injector);
- }
-
- @Test
- public void testContainerPage() throws Exception {
- Injector injector =
- WebAppTests.createMockInjector(ApplicationBaseProtocol.class,
- mockApplicationHistoryClientService(1, 1, 1));
- ContainerPage containerPageInstance =
- injector.getInstance(ContainerPage.class);
-
- containerPageInstance.render();
- WebAppTests.flushOutput(injector);
-
- containerPageInstance.set(
- YarnWebParams.CONTAINER_ID,
- ContainerId
- .newContainerId(
- ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1),
- 1).toString());
- containerPageInstance.render();
- WebAppTests.flushOutput(injector);
- }
-
- ApplicationHistoryClientService mockApplicationHistoryClientService(int numApps,
- int numAppAttempts, int numContainers) throws Exception {
- ApplicationHistoryManager ahManager =
- new MockApplicationHistoryManagerImpl(store);
- ApplicationHistoryClientService historyClientService =
- new ApplicationHistoryClientService(ahManager);
- for (int i = 1; i <= numApps; ++i) {
- ApplicationId appId = ApplicationId.newInstance(0, i);
- writeApplicationStartData(appId);
- for (int j = 1; j <= numAppAttempts; ++j) {
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, j);
- writeApplicationAttemptStartData(appAttemptId);
- for (int k = 1; k <= numContainers; ++k) {
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, k);
- writeContainerStartData(containerId);
- writeContainerFinishData(containerId);
- }
- writeApplicationAttemptFinishData(appAttemptId);
- }
- writeApplicationFinishData(appId);
- }
- return historyClientService;
- }
-
- class MockApplicationHistoryManagerImpl extends ApplicationHistoryManagerImpl {
-
- public MockApplicationHistoryManagerImpl(ApplicationHistoryStore store) {
- super();
- init(new YarnConfiguration());
- start();
- }
-
- @Override
- protected ApplicationHistoryStore createApplicationHistoryStore(
- Configuration conf) {
- return store;
- }
- };
-
-}
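
[Editor's note, not part of the commit] Every page test deleted above follows one mock-injector pattern; sketched here as an illustration only (AHSController itself is also removed by this commit), should render tests be reintroduced for the slimmed-down webapp:

    ApplicationHistoryManager manager = mock(ApplicationHistoryManager.class);
    Injector injector = WebAppTests.createMockInjector(
        ApplicationHistoryManager.class, manager);
    AHSController controller = injector.getInstance(AHSController.class);
    controller.index();                // render the index view
    WebAppTests.flushOutput(injector); // fails the test if rendering threw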
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
deleted file mode 100644
index 44b3f65..0000000
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
+++ /dev/null
@@ -1,302 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-import javax.ws.rs.core.MediaType;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerState;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryClientService;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
-import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
-import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
-import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.servlet.GuiceServletContextListener;
-import com.google.inject.servlet.ServletModule;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.ClientResponse.Status;
-import com.sun.jersey.api.client.UniformInterfaceException;
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
-import com.sun.jersey.test.framework.JerseyTest;
-import com.sun.jersey.test.framework.WebAppDescriptor;
-
-public class TestAHSWebServices extends JerseyTest {
-
- private static ApplicationHistoryClientService historyClientService;
-
- private Injector injector = Guice.createInjector(new ServletModule() {
-
- @Override
- protected void configureServlets() {
- bind(JAXBContextResolver.class);
- bind(AHSWebServices.class);
- bind(GenericExceptionHandler.class);
- try {
- historyClientService = mockApplicationHistoryManager();
- } catch (Exception e) {
- Assert.fail();
- }
- bind(ApplicationBaseProtocol.class).toInstance(historyClientService);
- serve("/*").with(GuiceContainer.class);
- }
- });
-
- public class GuiceServletConfig extends GuiceServletContextListener {
-
- @Override
- protected Injector getInjector() {
- return injector;
- }
- }
-
- private ApplicationHistoryClientService mockApplicationHistoryManager()
- throws Exception {
- ApplicationHistoryStore store = new MemoryApplicationHistoryStore();
- TestAHSWebApp testAHSWebApp = new TestAHSWebApp();
- testAHSWebApp.setApplicationHistoryStore(store);
- return testAHSWebApp.mockApplicationHistoryClientService(5, 5, 5);
- }
-
- public TestAHSWebServices() {
- super(new WebAppDescriptor.Builder(
- "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp")
- .contextListenerClass(GuiceServletConfig.class)
- .filterClass(com.google.inject.servlet.GuiceFilter.class)
- .contextPath("jersey-guice-filter").servletPath("/").build());
- }
-
- @Before
- @Override
- public void setUp() throws Exception {
- super.setUp();
- }
-
- @Test
- public void testInvalidUri() throws JSONException, Exception {
- WebResource r = resource();
- String responseStr = "";
- try {
- responseStr =
- r.path("ws").path("v1").path("applicationhistory").path("bogus")
- .accept(MediaType.APPLICATION_JSON).get(String.class);
- fail("should have thrown exception on invalid uri");
- } catch (UniformInterfaceException ue) {
- ClientResponse response = ue.getResponse();
- assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
-
- WebServicesTestUtils.checkStringMatch(
- "error string exists and shouldn't", "", responseStr);
- }
- }
-
- @Test
- public void testInvalidUri2() throws JSONException, Exception {
- WebResource r = resource();
- String responseStr = "";
- try {
- responseStr = r.accept(MediaType.APPLICATION_JSON).get(String.class);
- fail("should have thrown exception on invalid uri");
- } catch (UniformInterfaceException ue) {
- ClientResponse response = ue.getResponse();
- assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
- WebServicesTestUtils.checkStringMatch(
- "error string exists and shouldn't", "", responseStr);
- }
- }
-
- @Test
- public void testInvalidAccept() throws JSONException, Exception {
- WebResource r = resource();
- String responseStr = "";
- try {
- responseStr =
- r.path("ws").path("v1").path("applicationhistory")
- .accept(MediaType.TEXT_PLAIN).get(String.class);
- fail("should have thrown exception on invalid uri");
- } catch (UniformInterfaceException ue) {
- ClientResponse response = ue.getResponse();
- assertEquals(Status.INTERNAL_SERVER_ERROR,
- response.getClientResponseStatus());
- WebServicesTestUtils.checkStringMatch(
- "error string exists and shouldn't", "", responseStr);
- }
- }
-
- @Test
- public void testAppsQuery() throws Exception {
- WebResource r = resource();
- ClientResponse response =
- r.path("ws").path("v1").path("applicationhistory").path("apps")
- .queryParam("state", YarnApplicationState.FINISHED.toString())
- .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("incorrect number of elements", 1, json.length());
- JSONObject apps = json.getJSONObject("apps");
- assertEquals("incorrect number of elements", 1, apps.length());
- JSONArray array = apps.getJSONArray("app");
- assertEquals("incorrect number of elements", 5, array.length());
- }
-
- @Test
- public void testSingleApp() throws Exception {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- WebResource r = resource();
- ClientResponse response =
- r.path("ws").path("v1").path("applicationhistory").path("apps")
- .path(appId.toString()).accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("incorrect number of elements", 1, json.length());
- JSONObject app = json.getJSONObject("app");
- assertEquals(appId.toString(), app.getString("appId"));
- assertEquals(appId.toString(), app.get("name"));
- assertEquals(appId.toString(), app.get("diagnosticsInfo"));
- assertEquals("test queue", app.get("queue"));
- assertEquals("test user", app.get("user"));
- assertEquals("test type", app.get("type"));
- assertEquals(FinalApplicationStatus.UNDEFINED.toString(),
- app.get("finalAppStatus"));
- assertEquals(YarnApplicationState.FINISHED.toString(), app.get("appState"));
- }
-
- @Test
- public void testMultipleAttempts() throws Exception {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- WebResource r = resource();
- ClientResponse response =
- r.path("ws").path("v1").path("applicationhistory").path("apps")
- .path(appId.toString()).path("appattempts")
- .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("incorrect number of elements", 1, json.length());
- JSONObject appAttempts = json.getJSONObject("appAttempts");
- assertEquals("incorrect number of elements", 1, appAttempts.length());
- JSONArray array = appAttempts.getJSONArray("appAttempt");
- assertEquals("incorrect number of elements", 5, array.length());
- }
-
- @Test
- public void testSingleAttempt() throws Exception {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- WebResource r = resource();
- ClientResponse response =
- r.path("ws").path("v1").path("applicationhistory").path("apps")
- .path(appId.toString()).path("appattempts")
- .path(appAttemptId.toString()).accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("incorrect number of elements", 1, json.length());
- JSONObject appAttempt = json.getJSONObject("appAttempt");
- assertEquals(appAttemptId.toString(), appAttempt.getString("appAttemptId"));
- assertEquals(appAttemptId.toString(), appAttempt.getString("host"));
- assertEquals(appAttemptId.toString(),
- appAttempt.getString("diagnosticsInfo"));
- assertEquals("test tracking url", appAttempt.getString("trackingUrl"));
- assertEquals(YarnApplicationAttemptState.FINISHED.toString(),
- appAttempt.get("appAttemptState"));
- }
-
- @Test
- public void testMultipleContainers() throws Exception {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- WebResource r = resource();
- ClientResponse response =
- r.path("ws").path("v1").path("applicationhistory").path("apps")
- .path(appId.toString()).path("appattempts")
- .path(appAttemptId.toString()).path("containers")
- .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("incorrect number of elements", 1, json.length());
- JSONObject containers = json.getJSONObject("containers");
- assertEquals("incorrect number of elements", 1, containers.length());
- JSONArray array = containers.getJSONArray("container");
- assertEquals("incorrect number of elements", 5, array.length());
- }
-
- @Test
- public void testSingleContainer() throws Exception {
- ApplicationId appId = ApplicationId.newInstance(0, 1);
- ApplicationAttemptId appAttemptId =
- ApplicationAttemptId.newInstance(appId, 1);
- ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1);
- WebResource r = resource();
- ClientResponse response =
- r.path("ws").path("v1").path("applicationhistory").path("apps")
- .path(appId.toString()).path("appattempts")
- .path(appAttemptId.toString()).path("containers")
- .path(containerId.toString()).accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- JSONObject json = response.getEntity(JSONObject.class);
- assertEquals("incorrect number of elements", 1, json.length());
- JSONObject container = json.getJSONObject("container");
- assertEquals(containerId.toString(), container.getString("containerId"));
- assertEquals(containerId.toString(), container.getString("diagnosticsInfo"));
- assertEquals("0", container.getString("allocatedMB"));
- assertEquals("0", container.getString("allocatedVCores"));
- assertEquals(NodeId.newInstance("localhost", 0).toString(),
- container.getString("assignedNodeId"));
- assertEquals(Priority.newInstance(containerId.getId()).toString(),
- container.getString("priority"));
- Configuration conf = new YarnConfiguration();
- assertEquals(WebAppUtils.getHttpSchemePrefix(conf) +
- WebAppUtils.getAHSWebAppURLWithoutScheme(conf) +
- "/applicationhistory/logs/localhost:0/container_0_0001_01_000001/" +
- "container_0_0001_01_000001/test user",
- container.getString("logUrl"));
- assertEquals(ContainerState.COMPLETE.toString(),
- container.getString("containerState"));
- }
-
-}
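
With the ApplicationHistory stores, client service, and AHS web layer all
deleted above, the collector no longer registers anything under
/ws/v1/applicationhistory. A plain HTTP probe is enough to confirm that
against a running collector; the sketch below is illustrative only --
localhost and port 6188 (the usual collector port) are assumptions, not
values from this patch.

import java.net.HttpURLConnection;
import java.net.URL;

// Illustrative probe: with AHSWebServices gone, the old history REST
// resources are unregistered and this request should come back 404.
// Host and port are assumptions, not part of this commit.
public class AhsEndpointProbe {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:6188/ws/v1/applicationhistory/apps");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    conn.setRequestProperty("Accept", "application/json");
    System.out.println("HTTP " + conn.getResponseCode()); // expect 404
    conn.disconnect();
  }
}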
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
index b093a2a..83e2a27 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertEquals;
import javax.ws.rs.core.MediaType;
-import junit.framework.Assert;
-
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
@@ -33,7 +31,6 @@ import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TestTimelineMetricStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TestMemoryTimelineStore;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
import org.junit.Test;
@@ -49,10 +46,10 @@ import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;
+import junit.framework.Assert;
-public class TestTimelineWebServices extends JerseyTest {
- private static TimelineStore store;
+public class TestTimelineWebServices extends JerseyTest {
private static TimelineMetricStore metricStore;
private long beforeTime;
@@ -63,13 +60,11 @@ public class TestTimelineWebServices extends JerseyTest {
bind(YarnJacksonJaxbJsonProvider.class);
bind(TimelineWebServices.class);
bind(GenericExceptionHandler.class);
- try{
- store = mockTimelineStore();
+ try {
metricStore = new TestTimelineMetricStore();
} catch (Exception e) {
Assert.fail();
}
- bind(TimelineStore.class).toInstance(store);
bind(TimelineMetricStore.class).toInstance(metricStore);
serve("/*").with(GuiceContainer.class);
}
@@ -84,59 +79,30 @@ public class TestTimelineWebServices extends JerseyTest {
}
}
- private TimelineStore mockTimelineStore()
- throws Exception {
- beforeTime = System.currentTimeMillis() - 1;
- TestMemoryTimelineStore store = new TestMemoryTimelineStore();
- store.setup();
- return store.getTimelineStore();
- }
-
public TestTimelineWebServices() {
super(new WebAppDescriptor.Builder(
- "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp")
- .contextListenerClass(GuiceServletConfig.class)
- .filterClass(com.google.inject.servlet.GuiceFilter.class)
- .contextPath("jersey-guice-filter")
- .servletPath("/")
- .clientConfig(new DefaultClientConfig(YarnJacksonJaxbJsonProvider.class))
- .build());
+ "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter")
+ .servletPath("/")
+ .clientConfig(new DefaultClientConfig(YarnJacksonJaxbJsonProvider.class))
+ .build());
}
@Test
public void testAbout() throws Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("timeline")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
+ .accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
TimelineWebServices.AboutInfo about =
- response.getEntity(TimelineWebServices.AboutInfo.class);
+ response.getEntity(TimelineWebServices.AboutInfo.class);
Assert.assertNotNull(about);
- Assert.assertEquals("Timeline API", about.getAbout());
- }
-
- private static void verifyEntities(TimelineEntities entities) {
- Assert.assertNotNull(entities);
- Assert.assertEquals(2, entities.getEntities().size());
- TimelineEntity entity1 = entities.getEntities().get(0);
- Assert.assertNotNull(entity1);
- Assert.assertEquals("id_1", entity1.getEntityId());
- Assert.assertEquals("type_1", entity1.getEntityType());
- Assert.assertEquals(123l, entity1.getStartTime().longValue());
- Assert.assertEquals(2, entity1.getEvents().size());
- Assert.assertEquals(4, entity1.getPrimaryFilters().size());
- Assert.assertEquals(4, entity1.getOtherInfo().size());
- TimelineEntity entity2 = entities.getEntities().get(1);
- Assert.assertNotNull(entity2);
- Assert.assertEquals("id_2", entity2.getEntityId());
- Assert.assertEquals("type_1", entity2.getEntityType());
- Assert.assertEquals(123l, entity2.getStartTime().longValue());
- Assert.assertEquals(2, entity2.getEvents().size());
- Assert.assertEquals(4, entity2.getPrimaryFilters().size());
- Assert.assertEquals(4, entity2.getOtherInfo().size());
+ Assert.assertEquals("AMS API", about.getAbout());
}
-
+
private static void verifyMetrics(TimelineMetrics metrics) {
Assert.assertNotNull(metrics);
Assert.assertEquals("cpu_user", metrics.getMetrics().get(0).getMetricName());
@@ -146,239 +112,6 @@ public class TestTimelineWebServices extends JerseyTest {
}
@Test
- public void testGetEntities() throws Exception {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- verifyEntities(response.getEntity(TimelineEntities.class));
- }
-
- @Test
- public void testFromId() throws Exception {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("fromId", "id_2")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- assertEquals(1, response.getEntity(TimelineEntities.class).getEntities()
- .size());
-
- response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("fromId", "id_1")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
- .size());
- }
-
- @Test
- public void testFromTs() throws Exception {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("fromTs", Long.toString(beforeTime))
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- assertEquals(0, response.getEntity(TimelineEntities.class).getEntities()
- .size());
-
- response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("fromTs", Long.toString(
- System.currentTimeMillis()))
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
- .size());
- }
-
- @Test
- public void testPrimaryFilterString() {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("primaryFilter", "user:username")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- verifyEntities(response.getEntity(TimelineEntities.class));
- }
-
- @Test
- public void testPrimaryFilterInteger() {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("primaryFilter",
- "appname:" + Integer.toString(Integer.MAX_VALUE))
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- verifyEntities(response.getEntity(TimelineEntities.class));
- }
-
- @Test
- public void testPrimaryFilterLong() {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("primaryFilter",
- "long:" + Long.toString((long)Integer.MAX_VALUE + 1l))
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- verifyEntities(response.getEntity(TimelineEntities.class));
- }
-
- @Test
- public void testPrimaryFilterNumericString() {
- // without quotes, 123abc is interpreted as the number 123,
- // which finds no entities
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("primaryFilter", "other:123abc")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- assertEquals(0, response.getEntity(TimelineEntities.class).getEntities()
- .size());
- }
-
- @Test
- public void testPrimaryFilterNumericStringWithQuotes() {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").queryParam("primaryFilter", "other:\"123abc\"")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- verifyEntities(response.getEntity(TimelineEntities.class));
- }
-
- @Test
- public void testSecondaryFilters() {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1")
- .queryParam("secondaryFilter",
- "user:username,appname:" + Integer.toString(Integer.MAX_VALUE))
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- verifyEntities(response.getEntity(TimelineEntities.class));
- }
-
- @Test
- public void testGetEntity() throws Exception {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").path("id_1")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- TimelineEntity entity = response.getEntity(TimelineEntity.class);
- Assert.assertNotNull(entity);
- Assert.assertEquals("id_1", entity.getEntityId());
- Assert.assertEquals("type_1", entity.getEntityType());
- Assert.assertEquals(123l, entity.getStartTime().longValue());
- Assert.assertEquals(2, entity.getEvents().size());
- Assert.assertEquals(4, entity.getPrimaryFilters().size());
- Assert.assertEquals(4, entity.getOtherInfo().size());
- }
-
- @Test
- public void testGetEntityFields1() throws Exception {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").path("id_1").queryParam("fields", "events,otherinfo")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- TimelineEntity entity = response.getEntity(TimelineEntity.class);
- Assert.assertNotNull(entity);
- Assert.assertEquals("id_1", entity.getEntityId());
- Assert.assertEquals("type_1", entity.getEntityType());
- Assert.assertEquals(123l, entity.getStartTime().longValue());
- Assert.assertEquals(2, entity.getEvents().size());
- Assert.assertEquals(0, entity.getPrimaryFilters().size());
- Assert.assertEquals(4, entity.getOtherInfo().size());
- }
-
- @Test
- public void testGetEntityFields2() throws Exception {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").path("id_1").queryParam("fields", "lasteventonly," +
- "primaryfilters,relatedentities")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- TimelineEntity entity = response.getEntity(TimelineEntity.class);
- Assert.assertNotNull(entity);
- Assert.assertEquals("id_1", entity.getEntityId());
- Assert.assertEquals("type_1", entity.getEntityType());
- Assert.assertEquals(123l, entity.getStartTime().longValue());
- Assert.assertEquals(1, entity.getEvents().size());
- Assert.assertEquals(4, entity.getPrimaryFilters().size());
- Assert.assertEquals(0, entity.getOtherInfo().size());
- }
-
- @Test
- public void testGetEvents() throws Exception {
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .path("type_1").path("events")
- .queryParam("entityId", "id_1")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- TimelineEvents events = response.getEntity(TimelineEvents.class);
- Assert.assertNotNull(events);
- Assert.assertEquals(1, events.getAllEvents().size());
- TimelineEvents.EventsOfOneEntity partEvents = events.getAllEvents().get(0);
- Assert.assertEquals(2, partEvents.getEvents().size());
- TimelineEvent event1 = partEvents.getEvents().get(0);
- Assert.assertEquals(456l, event1.getTimestamp());
- Assert.assertEquals("end_event", event1.getEventType());
- Assert.assertEquals(1, event1.getEventInfo().size());
- TimelineEvent event2 = partEvents.getEvents().get(1);
- Assert.assertEquals(123l, event2.getTimestamp());
- Assert.assertEquals("start_event", event2.getEventType());
- Assert.assertEquals(0, event2.getEventInfo().size());
- }
-
- @Test
- public void testPostEntities() throws Exception {
- TimelineEntities entities = new TimelineEntities();
- TimelineEntity entity = new TimelineEntity();
- entity.setEntityId("test id");
- entity.setEntityType("test type");
- entity.setStartTime(System.currentTimeMillis());
- entities.addEntity(entity);
- WebResource r = resource();
- ClientResponse response = r.path("ws").path("v1").path("timeline")
- .accept(MediaType.APPLICATION_JSON)
- .type(MediaType.APPLICATION_JSON)
- .post(ClientResponse.class, entities);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- TimelinePutResponse putResponse = response.getEntity(TimelinePutResponse.class);
- Assert.assertNotNull(putResponse);
- Assert.assertEquals(0, putResponse.getErrors().size());
- // verify the entity exists in the store
- response = r.path("ws").path("v1").path("timeline")
- .path("test type").path("test id")
- .accept(MediaType.APPLICATION_JSON)
- .get(ClientResponse.class);
- assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
- entity = response.getEntity(TimelineEntity.class);
- Assert.assertNotNull(entity);
- Assert.assertEquals("test id", entity.getEntityId());
- Assert.assertEquals("test type", entity.getEntityType());
- }
-
- @Test
public void testGetMetrics() throws Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("timeline")
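
Only the metrics-side tests survive in this file; testGetMetrics drives the
same /ws/v1/timeline metrics path that external clients use. A rough
standalone equivalent follows -- "cpu_user" comes from the verifyMetrics
assertions above, while appId, hostname and the collector address are
illustrative assumptions:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

// Rough standalone version of what testGetMetrics exercises through the
// Jersey test harness. Query parameters other than the metric name are
// illustrative assumptions.
public class MetricsQuerySketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:6188/ws/v1/timeline/metrics"
        + "?metricNames=cpu_user&appId=HOST&hostname=localhost");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream()))) {
      for (String line; (line = in.readLine()) != null; ) {
        System.out.println(line); // TimelineMetrics rendered as JSON
      }
    }
    conn.disconnect();
  }
}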
diff --git a/ambari-metrics/pom.xml b/ambari-metrics/pom.xml
index 98559e6..c91f2f9 100644
--- a/ambari-metrics/pom.xml
+++ b/ambari-metrics/pom.xml
@@ -41,23 +41,17 @@
<python.ver>python >= 2.6</python.ver>
<deb.python.ver>python (>= 2.6)</deb.python.ver>
<!--TODO change to HDP URL-->
- <hbase.tar>https://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.6.4.0/tars/hbase/hbase-1.1.2.2.6.4.0-91.tar.gz</hbase.tar>
- <hbase.folder>hbase-1.1.2.2.6.4.0-91</hbase.folder>
- <hadoop.tar>https://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.6.4.0/tars/hadoop/hadoop-2.7.3.2.6.4.0-91.tar.gz</hadoop.tar>
- <hadoop.folder>hadoop-2.7.3.2.6.4.0-91</hadoop.folder>
- <hbase.winpkg.zip>https://msibuilds.blob.core.windows.net/hdp/2.x/2.2.4.2/2/hbase-0.98.4.2.2.4.2-0002-hadoop2.winpkg.zip</hbase.winpkg.zip>
- <hbase.winpkg.folder>hbase-0.98.4.2.2.4.2-0002-hadoop2</hbase.winpkg.folder>
- <hadoop.winpkg.zip>https://msibuilds.blob.core.windows.net/hdp/2.x/2.2.4.2/2/hadoop-2.6.0.2.2.4.2-0002.winpkg.zip</hadoop.winpkg.zip>
- <hadoop.winpkg.folder>hadoop-2.6.0.2.2.4.2-0002</hadoop.winpkg.folder>
+ <hbase.tar>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-623/tars/hbase/hbase-2.0.0.3.0.0.0-623-bin.tar.gz</hbase.tar>
+ <hbase.folder>hbase-2.0.0.3.0.0.0-623</hbase.folder>
+ <hadoop.tar>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-623/tars/hadoop/hadoop-3.0.0.3.0.0.0-623.tar.gz</hadoop.tar>
+ <hadoop.folder>hadoop-3.0.0.3.0.0.0-623</hadoop.folder>
<grafana.folder>grafana-2.6.0</grafana.folder>
<grafana.tar>https://grafanarel.s3.amazonaws.com/builds/grafana-2.6.0.linux-x64.tar.gz</grafana.tar>
- <phoenix.tar>https://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.6.4.0/tars/phoenix/phoenix-4.7.0.2.6.4.0-91.tar.gz</phoenix.tar>
- <phoenix.folder>phoenix-4.7.0.2.6.4.0-91</phoenix.folder>
+ <phoenix.tar>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-623/tars/phoenix/phoenix-5.0.0.3.0.0.0-623.tar.gz</phoenix.tar>
+ <phoenix.folder>phoenix-5.0.0.3.0.0.0-623</phoenix.folder>
<spark.tar>http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-439/tars/spark2/spark-2.1.0.3.0.0.0-439-bin-3.0.0.3.0.0.0-439.tgz</spark.tar>
<spark.folder>spark-2.1.0.3.0.0.0-439-bin-3.0.0.3.0.0.0-439</spark.folder>
- <resmonitor.install.dir>
- /usr/lib/python2.6/site-packages/resource_monitoring
- </resmonitor.install.dir>
+ <resmonitor.install.dir>/usr/lib/python2.6/site-packages/resource_monitoring</resmonitor.install.dir>
<powermock.version>1.6.2</powermock.version>
<distMgmtSnapshotsId>apache.snapshots.https</distMgmtSnapshotsId>
<distMgmtSnapshotsName>Apache Development Snapshot Repository</distMgmtSnapshotsName>
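
With the tarball coordinates moved from the public-repo 2.6.4.0 bits to the
3.0.0.0-623 dev build, a HEAD request per URL before a full build run can
catch a stale link early. A throwaway check -- the URLs are copied verbatim
from the properties in this diff, nothing else is assumed:

import java.net.HttpURLConnection;
import java.net.URL;

// Pre-build sanity check that the dev-build tarball URLs still resolve.
// HEAD fetches headers only, so nothing is downloaded.
public class CheckTarballUrls {
  public static void main(String[] args) throws Exception {
    String[] urls = {
      "http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-623/tars/hbase/hbase-2.0.0.3.0.0.0-623-bin.tar.gz",
      "http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-623/tars/hadoop/hadoop-3.0.0.3.0.0.0-623.tar.gz",
      "http://dev.hortonworks.com.s3.amazonaws.com/HDP/centos7/3.x/BUILDS/3.0.0.0-623/tars/phoenix/phoenix-5.0.0.3.0.0.0-623.tar.gz",
    };
    for (String u : urls) {
      HttpURLConnection conn = (HttpURLConnection) new URL(u).openConnection();
      conn.setRequestMethod("HEAD");
      System.out.println(conn.getResponseCode() + " " + u);
      conn.disconnect();
    }
  }
}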
--
To stop receiving notification emails like this one, please contact
avijayan@apache.org.