You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by sw...@apache.org on 2014/12/01 21:03:50 UTC
[22/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)
AMBARI-5707. Renaming a module. (swagle)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c20904e4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c20904e4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c20904e4
Branch: refs/heads/branch-metrics-dev
Commit: c20904e412d96f040b3fa067a9d4e2a7e1eee8ae
Parents: ba3d692
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Mon Dec 1 12:03:20 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Mon Dec 1 12:03:20 2014 -0800
----------------------------------------------------------------------
.../conf/unix/ambari-metrics-collector | 269 +++++++
.../conf/unix/ams-env.sh | 16 +
.../conf/unix/ams-site.xml | 25 +
.../conf/unix/log4j.properties | 31 +
.../ambari-metrics-timelineservice/pom.xml | 593 ++++++++++++++
.../ApplicationHistoryStoreTestUtils.java | 84 ++
.../TestApplicationHistoryClientService.java | 209 +++++
.../TestApplicationHistoryManagerImpl.java | 76 ++
.../TestApplicationHistoryServer.java | 217 +++++
.../TestFileSystemApplicationHistoryStore.java | 233 ++++++
.../TestMemoryApplicationHistoryStore.java | 204 +++++
.../loadsimulator/data/TestAppMetrics.java | 134 ++++
.../metrics/loadsimulator/data/TestMetric.java | 80 ++
.../net/TestRestMetricsSender.java | 76 ++
.../net/TestStdOutMetricsSender.java | 37 +
.../util/TestRandomMetricsProvider.java | 36 +
.../util/TestTimeStampProvider.java | 51 ++
.../timeline/AbstractMiniHBaseClusterTest.java | 113 +++
.../AbstractPhoenixConnectionlessTest.java | 111 +++
.../timeline/TestMetricHostAggregate.java | 65 ++
.../timeline/TestPhoenixTransactSQL.java | 43 +
.../timeline/TestTimelineMetricStore.java | 81 ++
.../timeline/TestGenericObjectMapper.java | 102 +++
.../timeline/TestLeveldbTimelineStore.java | 253 ++++++
.../timeline/TestMemoryTimelineStore.java | 83 ++
.../timeline/TimelineStoreTestUtils.java | 789 +++++++++++++++++++
.../src/test/resources/hbase-default.xml | 36 +
.../4.2.0/phoenix-core-tests-4.2.0.jar | Bin 0 -> 992828 bytes
.../4.2.0/phoenix-core-tests-4.2.0.pom | 9 +
.../phoenix-core-tests/maven-metadata-local.xml | 12 +
30 files changed, 4068 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
new file mode 100644
index 0000000..9aabbdc
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
@@ -0,0 +1,269 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
+
+#JAVA_HOME=/usr/jdk64/jdk1.7.0_45
+PIDFILE=/var/run/ambari-metrics-collector/ambari-metrics-collector.pid
+OUTFILE=/var/log/ambari-metrics-collector/ambari-metrics-collector.out
+
+HBASE_ZK_PID=/var/run/ams-hbase/hbase-hbase-zookeeper.pid
+HBASE_MASTER_PID=/var/run/ams-hbase/hbase-hbase-master.pid
+HBASE_RS_PID=/var/run/ams-hbase/hbase-hbase-regionserver.pid
+
+HBASE_DIR=/usr/lib/ams-hbase
+
+DAEMON_NAME=timelineserver
+
+COLLECTOR_CONF_DIR=/etc/ambari-metrics-collector/conf
+HBASE_CONF_DIR=/etc/ams-hbase/conf
+
+METRIC_COLLECTOR=ambari-metrics-collector
+
+STOP_TIMEOUT=5
+
+function hbase_daemon
+{
+ local daemon=$1
+ local cmd=$2
+ local pid
+
+ case "${daemon}" in
+ "master")
+ pid=${HBASE_MASTER_PID}
+ ;;
+ "zookeeper")
+ pid=${HBASE_ZK_PID}
+ ;;
+ "regionserver")
+ pid=${HBASE_RS_PID}
+ ;;
+ esac
+
+ daemon_status "${pid}"
+ if [[ $? == 0 ]]; then
+ echo "${daemon} is running as process $(cat "${pid}"). Continuing"
+ else
+ # stale pid file, so just remove it and continue on
+ rm -f "${pid}" >/dev/null 2>&1
+ fi
+
+ ${HBASE_DIR}/bin/hbase-daemon.sh --config ${HBASE_CONF_DIR} ${cmd} ${daemon}
+
+
+
+}
+
+function write_pidfile
+{
+ local pidfile="$1"
+ echo $! > "${pidfile}" 2>/dev/null
+ if [[ $? -gt 0 ]]; then
+ echo "ERROR: Cannot write pid ${pidfile}."
+ exit 1;
+ fi
+}
+
+function hadoop_java_setup
+{
+ # Bail if we did not detect it
+ if [[ -z "${JAVA_HOME}" ]]; then
+ echo "ERROR: JAVA_HOME is not set and could not be found."
+ exit 1
+ fi
+
+ if [[ ! -d "${JAVA_HOME}" ]]; then
+ echo "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
+ exit 1
+ fi
+
+ JAVA="${JAVA_HOME}/bin/java"
+
+ if [[ ! -x "$JAVA" ]]; then
+ echo "ERROR: $JAVA is not executable."
+ exit 1
+ fi
+ # shellcheck disable=SC2034
+ JAVA_HEAP_MAX=-Xmx1g
+ HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-1024}
+
+ # check envvars which might override default args
+ if [[ -n "$HADOOP_HEAPSIZE" ]]; then
+ # shellcheck disable=SC2034
+ JAVA_HEAP_MAX="-Xmx${HADOOP_HEAPSIZE}m"
+ fi
+}
+
+function daemon_status()
+{
+ #
+ # LSB 4.1.0 compatible status command (1)
+ #
+ # 0 = program is running
+ # 1 = dead, but still a pid (2)
+ # 2 = (not used by us)
+ # 3 = not running
+ #
+ # 1 - this is not an endorsement of the LSB
+ #
+ # 2 - technically, the specification says /var/run/pid, so
+ # we should never return this value, but we're giving
+ # them the benefit of a doubt and returning 1 even if
+#       our pid is not in /var/run .
+ #
+
+ local pidfile="$1"
+ shift
+
+ local pid
+
+ if [[ -f "${pidfile}" ]]; then
+ pid=$(cat "${pidfile}")
+ if ps -p "${pid}" > /dev/null 2>&1; then
+ return 0
+ fi
+ return 1
+ fi
+ return 3
+}
+
+while [[ -z "${_ams_configs_done}" ]]; do
+ case $1 in
+ --config)
+ shift
+ confdir=$1
+ shift
+ if [[ -d "${confdir}" ]]; then
+ COLLECTOR_CONF_DIR="${confdir}"
+ elif [[ -z "${confdir}" ]]; then
+ echo "ERROR: No parameter provided for --config "
+ exit 1
+ else
+ echo "ERROR: Cannot find configuration directory \"${confdir}\""
+ exit 1
+ fi
+ ;;
+ *)
+ _ams_configs_done=true
+ ;;
+ esac
+done
+
+#execute ams-env.sh
+if [[ -f "${COLLECTOR_CONF_DIR}/ams-env.sh" ]]; then
+ . "${COLLECTOR_CONF_DIR}/ams-env.sh"
+else
+ echo "ERROR: Cannot execute ${COLLECTOR_CONF_DIR}/ams-env.sh." 2>&1
+ exit 1
+fi
+
+#TODO manage 3 hbase daemons for start/stop/status
+case "$1" in
+
+ start)
+ hadoop_java_setup
+
+ #hbase_daemon "zookeeper" "start"
+
+ hbase_daemon "master" "start"
+ #hbase_daemon "regionserver" "start"
+
+ sleep 30
+
+ CLASS='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
+ # YARN_OPTS="${YARN_OPTS} ${YARN_TIMELINESERVER_OPTS}"
+ # if [[ -n "${YARN_TIMELINESERVER_HEAPSIZE}" ]]; then
+ # JAVA_HEAP_MAX="-Xmx${YARN_TIMELINESERVER_HEAPSIZE}m"
+ # fi
+
+ # check if this is needed?
+ # export PHOENIX_JAR_PATH=/usr/lib/ambari-metrics/timelineservice/phoenix-client.jar
+ # export HBASE_CONF_DIR=${HBASE_DIR}/conf
+
+ daemon_status "${PIDFILE}"
+ if [[ $? == 0 ]]; then
+ echo "AMS is running as process $(cat "${PIDFILE}"). Exiting"
+ exit 1
+ else
+ # stale pid file, so just remove it and continue on
+ rm -f "${PIDFILE}" >/dev/null 2>&1
+ fi
+
+ nohup "${JAVA}" "-cp" "/usr/lib/ambari-metrics-collector/*:${COLLECTOR_CONF_DIR}" "-Djava.net.preferIPv4Stack=true" "-Dproc_${DAEMON_NAME}" "${CLASS}" "$@" > $OUTFILE 2>&1 &
+ PID=$!
+ write_pidfile "${PIDFILE}"
+ sleep 2
+
+ echo "Verifying ${METRIC_COLLECTOR} process status..."
+ if [ -z "`ps ax -o pid | grep ${PID}`" ]; then
+ if [ -s ${OUTFILE} ]; then
+ echo "ERROR: ${METRIC_COLLECTOR} start failed. For more details, see ${OUTFILE}:"
+ echo "===================="
+ tail -n 10 ${OUTFILE}
+ echo "===================="
+ else
+ echo "ERROR: ${METRIC_COLLECTOR} start failed"
+ rm -f ${PIDFILE}
+ fi
+ echo "Collector out at: ${OUTFILE}"
+ exit -1
+ fi
+
+ echo "Collector successfully started."
+
+ ;;
+ stop)
+ pidfile=${PIDFILE}
+
+ if [[ -f "${pidfile}" ]]; then
+ pid=$(cat "$pidfile")
+
+ kill "${pid}" >/dev/null 2>&1
+ sleep "${STOP_TIMEOUT}"
+
+ if kill -0 "${pid}" > /dev/null 2>&1; then
+ echo "WARNING: ${METRIC_COLLECTOR} did not stop gracefully after ${STOP_TIMEOUT} seconds: Trying to kill with kill -9"
+ kill -9 "${pid}" >/dev/null 2>&1
+ fi
+
+ if ps -p "${pid}" > /dev/null 2>&1; then
+ echo "ERROR: Unable to kill ${pid}"
+ else
+ rm -f "${pidfile}" >/dev/null 2>&1
+ fi
+ fi
+
+ #stop hbase daemons
+ #hbase_daemon "zookeeper" "stop"
+ hbase_daemon "master" "stop"
+ #hbase_daemon "regionserver" "stop"
+
+
+ ;;
+ status)
+ daemon_status "${PIDFILE}"
+ if [[ $? == 0 ]]; then
+ echo "AMS is running as process $(cat "${PIDFILE}")."
+ else
+ echo "AMS is not running."
+ fi
+ #print embedded hbase daemons statuses?
+ ;;
+ restart)
+ ;;
+
+esac
+
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ams-env.sh
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ams-env.sh b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ams-env.sh
new file mode 100644
index 0000000..9928093
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ams-env.sh
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set environment variables here.
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ams-site.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ams-site.xml b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ams-site.xml
new file mode 100644
index 0000000..c2dd100
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ams-site.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one
+ ~ or more contributor license agreements. See the NOTICE file
+ ~ distributed with this work for additional information
+ ~ regarding copyright ownership. The ASF licenses this file
+ ~ to you under the Apache License, Version 2.0 (the
+ ~ "License"); you may not use this file except in compliance
+ ~ with the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<configuration>
+
+ <!-- Site specific AMS configuration properties -->
+
+</configuration>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/conf/unix/log4j.properties
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/log4j.properties b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/log4j.properties
new file mode 100644
index 0000000..8a9e2c8
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/log4j.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Define some default values that can be overridden by system properties
+# Root logger option
+log4j.rootLogger=INFO,file
+
+# Direct log messages to a log file
+log4j.appender.file=org.apache.log4j.RollingFileAppender
+log4j.appender.file.File=/var/log/ambari-metrics-collector/ambari-metrics-collector.log
+log4j.appender.file.MaxFileSize=80MB
+log4j.appender.file.MaxBackupIndex=60
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p [%t] %c{1}:%L - %m%n
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/pom.xml b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
new file mode 100644
index 0000000..d0a72ae
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
@@ -0,0 +1,593 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+ http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <artifactId>ambari-metrics</artifactId>
+ <groupId>org.apache.ambari</groupId>
+ <version>0.1.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>ambari-metrics-timelineservice</artifactId>
+ <version>0.1.0-SNAPSHOT</version>
+ <name>ambari-metrics-timelineservice</name>
+ <packaging>jar</packaging>
+
+ <properties>
+ <!-- Needed for generating FindBugs warnings using parent pom -->
+ <!--<yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>-->
+ <protobuf.version>2.5.0</protobuf.version>
+ <hadoop.version>2.4.0</hadoop.version>
+ </properties>
+
+ <repositories>
+ <repository>
+ <id>phoenix-core-tests</id>
+ <name>Phoenix Unit tests</name>
+ <url>file://${project.basedir}/src/test/resources/lib</url>
+ </repository>
+ </repositories>
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>${project.build.directory}/lib</outputDirectory>
+ <includeScope>compile</includeScope>
+ <excludeScope>test</excludeScope>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <executions>
+ <execution>
+ <configuration>
+ <descriptors>
+ <descriptor>src/main/assemblies/ats.xml</descriptor>
+ </descriptors>
+ <tarLongFileMode>gnu</tarLongFileMode>
+ </configuration>
+ <id>build-tarball</id>
+ <phase>none</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>com.github.goldin</groupId>
+ <artifactId>copy-maven-plugin</artifactId>
+ <version>0.2.5</version>
+ <executions>
+ <execution>
+ <id>create-archive</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy</goal>
+ </goals>
+ <configuration>
+ <resources>
+ <resource>
+ <targetPath>${project.build.directory}/embedded</targetPath>
+ <file>${hbase.tar}</file>
+ <unpack>true</unpack>
+ </resource>
+ </resources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>rpm-maven-plugin</artifactId>
+ <version>2.0.1</version>
+ <executions>
+ <execution>
+ <!-- unbinds rpm creation from maven lifecycle -->
+ <phase>none</phase>
+ <goals>
+ <goal>rpm</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <name>ambari-metrics-collector</name>
+ <copyright>2012, Apache Software Foundation</copyright>
+ <group>Development</group>
+ <description>Maven Recipe: RPM Package.</description>
+ <autoRequires>false</autoRequires>
+ <requires>
+ <require>${python.ver}</require>
+ </requires>
+
+ <defaultFilemode>644</defaultFilemode>
+ <defaultDirmode>755</defaultDirmode>
+ <defaultUsername>root</defaultUsername>
+ <defaultGroupname>root</defaultGroupname>
+
+ <mappings>
+ <mapping>
+ <!--jars-->
+ <directory>/usr/lib/ambari-metrics-collector/</directory>
+ <sources>
+ <source>
+ <location>target/lib</location>
+ </source>
+ <source>
+ <location>${project.build.directory}/${project.artifactId}-${project.version}.jar</location>
+ </source>
+ </sources>
+ </mapping>
+ <mapping>
+ <!--embedded applications-->
+ <directory>/usr/lib/ams-hbase/</directory>
+ <sources>
+ <source>
+ <location>target/embedded/${hbase.folder}</location>
+ <excludes>
+ <exclude>bin/**</exclude>
+ <exclude>bin/*</exclude>
+ </excludes>
+ </source>
+ </sources>
+ </mapping>
+ <mapping>
+ <directory>/usr/lib/ams-hbase/bin</directory>
+ <filemode>755</filemode>
+ <sources>
+ <source>
+ <location>target/embedded/${hbase.folder}/bin</location>
+ </source>
+ </sources>
+ </mapping>
+ <mapping>
+ <directory>/usr/lib/ams-hbase/lib/</directory>
+ <sources>
+ <source>
+ <location>target/lib</location>
+ <includes>
+ <include>phoenix*.jar</include>
+ <include>antlr*.jar</include>
+ </includes>
+ </source>
+ </sources>
+ </mapping>
+ <mapping>
+ <directory>/usr/sbin</directory>
+ <filemode>755</filemode>
+ <username>root</username>
+ <groupname>root</groupname>
+ <directoryIncluded>false</directoryIncluded>
+ <sources>
+ <source>
+ <location>conf/unix/ambari-metrics-collector</location>
+ <filter>false</filter>
+ </source>
+ </sources>
+ </mapping>
+ <mapping>
+ <directory>/etc/ambari-metrics-collector/conf</directory>
+ <configuration>true</configuration>
+ <sources>
+ <source>
+ <location>conf/unix/ams-env.sh</location>
+ </source>
+ <source>
+ <location>conf/unix/ams-site.xml</location>
+ </source>
+ <source>
+ <location>conf/unix/log4j.properties</location>
+ </source>
+ <source>
+ <location>target/embedded/${hbase.folder}/conf/hbase-site.xml</location>
+ </source>
+ </sources>
+ </mapping>
+ <mapping>
+ <directory>/etc/ams-hbase/conf</directory>
+ <configuration>true</configuration>
+ <sources>
+ <source>
+ <location>target/embedded/${hbase.folder}/conf</location>
+ <includes>
+ <include>*.*</include>
+ </includes>
+ </source>
+ </sources>
+ </mapping>
+ <mapping>
+ <directory>/var/run/ams-hbase</directory>
+ </mapping>
+ <mapping>
+ <directory>/var/run/ambari-metrics-collector</directory>
+ </mapping>
+ <mapping>
+ <directory>/var/log/ambari-metrics-collector</directory>
+ </mapping>
+ <mapping>
+ <directory>/var/lib/ambari-metrics-collector</directory>
+ </mapping>
+ </mappings>
+ </configuration>
+ </plugin>
+ <plugin>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <redirectTestOutputToFile>true</redirectTestOutputToFile>
+ <forkMode>always</forkMode>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-core</artifactId>
+ <version>4.2.0.2.2.0.0-2041</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>2.5</version>
+ </dependency>
+
+ <dependency>
+ <artifactId>ambari-metrics-hadoop-sink</artifactId>
+ <groupId>org.apache.ambari</groupId>
+ <version>0.1.0-SNAPSHOT</version>
+ </dependency>
+
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ <version>2.5</version>
+ </dependency>
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop.version}</version>
+ <scope>provided</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>commons-el</groupId>
+ <artifactId>commons-el</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-runtime</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>tomcat</groupId>
+ <artifactId>jasper-compiler</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.mortbay.jetty</groupId>
+ <artifactId>jsp-2.1-jetty</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-annotations</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>1.8.5</version>
+ <scope>test</scope>
+ </dependency>
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.google.inject.extensions</groupId>
+ <artifactId>guice-servlet</artifactId>
+ <version>3.0</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.protobuf</groupId>
+ <artifactId>protobuf-java</artifactId>
+ <version>${protobuf.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ <version>4.10</version>
+ </dependency>
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>com.google.inject</groupId>
+ <artifactId>guice</artifactId>
+ <version>3.0</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey.jersey-test-framework</groupId>
+ <artifactId>jersey-test-framework-core</artifactId>
+ <version>1.11</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-json</artifactId>
+ <version>1.11</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey.contribs</groupId>
+ <artifactId>jersey-guice</artifactId>
+ <version>1.11</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-server</artifactId>
+ <version>1.11</version>
+ </dependency>
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ <version>${hadoop.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-api</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.xml.bind</groupId>
+ <artifactId>jaxb-api</artifactId>
+ <version>2.2.2</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.jettison</groupId>
+ <artifactId>jettison</artifactId>
+ <version>1.1</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ <version>1.11</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ <version>1.11</version>
+ </dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>14.0.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-logging</groupId>
+ <artifactId>commons-logging</artifactId>
+ <version>1.1.1</version>
+ </dependency>
+
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-server-common</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+
+ <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+ <dependency>
+ <groupId>com.sun.jersey.jersey-test-framework</groupId>
+ <artifactId>jersey-test-framework-grizzly2</artifactId>
+ <scope>test</scope>
+ <version>1.11</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-core-asl</artifactId>
+ <version>1.9.9</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ <version>1.7.2</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ <version>1.7.2</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-mapper-asl</artifactId>
+ <version>1.9.13</version>
+ </dependency>
+
+ <dependency>
+ <groupId>commons-collections</groupId>
+ <artifactId>commons-collections</artifactId>
+ <version>3.2.1</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.fusesource.leveldbjni</groupId>
+ <artifactId>leveldbjni-all</artifactId>
+ <version>1.8</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <version>1.7.0</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.easymock</groupId>
+ <artifactId>easymock</artifactId>
+ <version>3.2</version>
+ <scope>test</scope>
+ </dependency>
+ <!-- for unit tests only -->
+ <dependency>
+ <groupId>org.apache.phoenix</groupId>
+ <artifactId>phoenix-core-tests</artifactId>
+ <version>4.2.0</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-it</artifactId>
+ <version>0.98.4-hadoop2</version>
+ <scope>test</scope>
+ <classifier>tests</classifier>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hbase</groupId>
+ <artifactId>hbase-testing-util</artifactId>
+ <version>0.98.4-hadoop2</version>
+ <scope>test</scope>
+ <optional>true</optional>
+ <exclusions>
+ <exclusion>
+ <groupId>org.jruby</groupId>
+ <artifactId>jruby-complete</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.powermock</groupId>
+ <artifactId>powermock-module-junit4</artifactId>
+ <version>1.4.9</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.powermock</groupId>
+ <artifactId>powermock-api-mockito</artifactId>
+ <version>1.4.9</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.powermock</groupId>
+ <artifactId>powermock-api-easymock</artifactId>
+ <version>1.4.9</version>
+ <scope>test</scope>
+ </dependency>
+
+ </dependencies>
+
+ <profiles>
+ <profile>
+ <id>sim</id>
+ <build>
+
+ <plugins>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <configuration>
+ <descriptors>
+ <descriptor>src/main/assemblies/simulator.xml</descriptor>
+ </descriptors>
+ <tarLongFileMode>gnu</tarLongFileMode>
+ </configuration>
+ <executions>
+ <execution>
+ <id>build-tarball</id>
+ <phase>package</phase>
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+
+ <plugin>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.3.1</version>
+ <!-- The configuration of the plugin -->
+ <configuration>
+ <!-- Configuration of the archiver -->
+ <finalName>${pom.artifactId}-simulator-${pom.version}</finalName>
+ <archive>
+ <!-- Manifest specific configuration -->
+ <manifest>
+ <!-- Classpath is added to the manifest of the created jar file. -->
+ <addClasspath>true</addClasspath>
+ <!--
+ Configures the classpath prefix. This configuration option is
+ used to specify that all needed libraries are found under lib/
+ directory.
+ -->
+ <classpathPrefix></classpathPrefix>
+ <!-- Specifies the main class of the application -->
+ <mainClass>
+ org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.MetricsLoadSimulator
+ </mainClass>
+ </manifest>
+ </archive>
+ </configuration>
+ </plugin>
+ </plugins>
+
+ </build>
+
+ </profile>
+ </profiles>
+</project>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
new file mode 100644
index 0000000..c41b8a7
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryStoreTestUtils.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerState;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
+
+public class ApplicationHistoryStoreTestUtils {
+
+ protected ApplicationHistoryStore store;
+
+ protected void writeApplicationStartData(ApplicationId appId)
+ throws IOException {
+ store.applicationStarted(ApplicationStartData.newInstance(appId,
+ appId.toString(), "test type", "test queue", "test user", 0, 0));
+ }
+
+ protected void writeApplicationFinishData(ApplicationId appId)
+ throws IOException {
+ store.applicationFinished(ApplicationFinishData.newInstance(appId, 0,
+ appId.toString(), FinalApplicationStatus.UNDEFINED,
+ YarnApplicationState.FINISHED));
+ }
+
+ protected void writeApplicationAttemptStartData(
+ ApplicationAttemptId appAttemptId) throws IOException {
+ store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
+ appAttemptId, appAttemptId.toString(), 0,
+ ContainerId.newInstance(appAttemptId, 1)));
+ }
+
+ protected void writeApplicationAttemptFinishData(
+ ApplicationAttemptId appAttemptId) throws IOException {
+ store.applicationAttemptFinished(ApplicationAttemptFinishData.newInstance(
+ appAttemptId, appAttemptId.toString(), "test tracking url",
+ FinalApplicationStatus.UNDEFINED, YarnApplicationAttemptState.FINISHED));
+ }
+
+ protected void writeContainerStartData(ContainerId containerId)
+ throws IOException {
+ store.containerStarted(ContainerStartData.newInstance(containerId,
+ Resource.newInstance(0, 0), NodeId.newInstance("localhost", 0),
+ Priority.newInstance(containerId.getId()), 0));
+ }
+
+ protected void writeContainerFinishData(ContainerId containerId)
+ throws IOException {
+ store.containerFinished(ContainerFinishData.newInstance(containerId, 0,
+ containerId.toString(), 0, ContainerState.COMPLETE));
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
new file mode 100644
index 0000000..6b06918
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryClientService.java
@@ -0,0 +1,209 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.IOException;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerReport;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+// Timeline service client support is not enabled for AMS
+@Ignore
+public class TestApplicationHistoryClientService extends
+ ApplicationHistoryStoreTestUtils {
+
+ ApplicationHistoryServer historyServer = null;
+ String expectedLogUrl = null;
+
+ @Before
+ public void setup() {
+ historyServer = new ApplicationHistoryServer();
+ Configuration config = new YarnConfiguration();
+ expectedLogUrl = WebAppUtils.getHttpSchemePrefix(config) +
+ WebAppUtils.getAHSWebAppURLWithoutScheme(config) +
+ "/applicationhistory/logs/localhost:0/container_0_0001_01_000001/" +
+ "container_0_0001_01_000001/test user";
+ config.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE,
+ MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
+ historyServer.init(config);
+ historyServer.start();
+ store =
+ ((ApplicationHistoryManagerImpl) historyServer.getApplicationHistory())
+ .getHistoryStore();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ historyServer.stop();
+ }
+
+ @Test
+ public void testApplicationReport() throws IOException, YarnException {
+ ApplicationId appId = null;
+ appId = ApplicationId.newInstance(0, 1);
+ writeApplicationStartData(appId);
+ writeApplicationFinishData(appId);
+ GetApplicationReportRequest request =
+ GetApplicationReportRequest.newInstance(appId);
+ GetApplicationReportResponse response =
+ historyServer.getClientService().getClientHandler()
+ .getApplicationReport(request);
+ ApplicationReport appReport = response.getApplicationReport();
+ Assert.assertNotNull(appReport);
+ Assert.assertEquals("application_0_0001", appReport.getApplicationId()
+ .toString());
+ Assert.assertEquals("test type", appReport.getApplicationType().toString());
+ Assert.assertEquals("test queue", appReport.getQueue().toString());
+ }
+
+ @Test
+ public void testApplications() throws IOException, YarnException {
+ ApplicationId appId = null;
+ appId = ApplicationId.newInstance(0, 1);
+ writeApplicationStartData(appId);
+ writeApplicationFinishData(appId);
+ ApplicationId appId1 = ApplicationId.newInstance(0, 2);
+ writeApplicationStartData(appId1);
+ writeApplicationFinishData(appId1);
+ GetApplicationsRequest request = GetApplicationsRequest.newInstance();
+ GetApplicationsResponse response =
+ historyServer.getClientService().getClientHandler()
+ .getApplications(request);
+ List<ApplicationReport> appReport = response.getApplicationList();
+ Assert.assertNotNull(appReport);
+ Assert.assertEquals(appId, appReport.get(0).getApplicationId());
+ Assert.assertEquals(appId1, appReport.get(1).getApplicationId());
+ }
+
+ @Test
+ public void testApplicationAttemptReport() throws IOException, YarnException {
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ writeApplicationAttemptStartData(appAttemptId);
+ writeApplicationAttemptFinishData(appAttemptId);
+ GetApplicationAttemptReportRequest request =
+ GetApplicationAttemptReportRequest.newInstance(appAttemptId);
+ GetApplicationAttemptReportResponse response =
+ historyServer.getClientService().getClientHandler()
+ .getApplicationAttemptReport(request);
+ ApplicationAttemptReport attemptReport =
+ response.getApplicationAttemptReport();
+ Assert.assertNotNull(attemptReport);
+ Assert.assertEquals("appattempt_0_0001_000001", attemptReport
+ .getApplicationAttemptId().toString());
+ }
+
+ @Test
+ public void testApplicationAttempts() throws IOException, YarnException {
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ ApplicationAttemptId appAttemptId1 =
+ ApplicationAttemptId.newInstance(appId, 2);
+ writeApplicationAttemptStartData(appAttemptId);
+ writeApplicationAttemptFinishData(appAttemptId);
+ writeApplicationAttemptStartData(appAttemptId1);
+ writeApplicationAttemptFinishData(appAttemptId1);
+ GetApplicationAttemptsRequest request =
+ GetApplicationAttemptsRequest.newInstance(appId);
+ GetApplicationAttemptsResponse response =
+ historyServer.getClientService().getClientHandler()
+ .getApplicationAttempts(request);
+ List<ApplicationAttemptReport> attemptReports =
+ response.getApplicationAttemptList();
+ Assert.assertNotNull(attemptReports);
+ Assert.assertEquals(appAttemptId, attemptReports.get(0)
+ .getApplicationAttemptId());
+ Assert.assertEquals(appAttemptId1, attemptReports.get(1)
+ .getApplicationAttemptId());
+ }
+
+ @Test
+ public void testContainerReport() throws IOException, YarnException {
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ writeApplicationStartData(appId);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+ writeContainerStartData(containerId);
+ writeContainerFinishData(containerId);
+ writeApplicationFinishData(appId);
+ GetContainerReportRequest request =
+ GetContainerReportRequest.newInstance(containerId);
+ GetContainerReportResponse response =
+ historyServer.getClientService().getClientHandler()
+ .getContainerReport(request);
+ ContainerReport container = response.getContainerReport();
+ Assert.assertNotNull(container);
+ Assert.assertEquals(containerId, container.getContainerId());
+ Assert.assertEquals(expectedLogUrl, container.getLogUrl());
+ }
+
+ @Test
+ public void testContainers() throws IOException, YarnException {
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ writeApplicationStartData(appId);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+ ContainerId containerId1 = ContainerId.newInstance(appAttemptId, 2);
+ writeContainerStartData(containerId);
+ writeContainerFinishData(containerId);
+ writeContainerStartData(containerId1);
+ writeContainerFinishData(containerId1);
+ writeApplicationFinishData(appId);
+ GetContainersRequest request =
+ GetContainersRequest.newInstance(appAttemptId);
+ GetContainersResponse response =
+ historyServer.getClientService().getClientHandler()
+ .getContainers(request);
+ List<ContainerReport> containers = response.getContainerList();
+ Assert.assertNotNull(containers);
+ Assert.assertEquals(containerId, containers.get(1).getContainerId());
+ Assert.assertEquals(containerId1, containers.get(0).getContainerId());
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java
new file mode 100644
index 0000000..aad23d9
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryManagerImpl.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestApplicationHistoryManagerImpl extends
+ ApplicationHistoryStoreTestUtils {
+ ApplicationHistoryManagerImpl applicationHistoryManagerImpl = null;
+
+ @Before
+ public void setup() throws Exception {
+ Configuration config = new Configuration();
+ config.setClass(YarnConfiguration.APPLICATION_HISTORY_STORE,
+ MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
+ applicationHistoryManagerImpl = new ApplicationHistoryManagerImpl();
+ applicationHistoryManagerImpl.init(config);
+ applicationHistoryManagerImpl.start();
+ store = applicationHistoryManagerImpl.getHistoryStore();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ applicationHistoryManagerImpl.stop();
+ }
+
+ @Test
+ @Ignore
+ public void testApplicationReport() throws IOException, YarnException {
+ ApplicationId appId = null;
+ appId = ApplicationId.newInstance(0, 1);
+ writeApplicationStartData(appId);
+ writeApplicationFinishData(appId);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ writeApplicationAttemptStartData(appAttemptId);
+ writeApplicationAttemptFinishData(appAttemptId);
+ ApplicationReport appReport =
+ applicationHistoryManagerImpl.getApplication(appId);
+ Assert.assertNotNull(appReport);
+ Assert.assertEquals(appId, appReport.getApplicationId());
+ Assert.assertEquals(appAttemptId,
+ appReport.getCurrentApplicationAttemptId());
+ Assert.assertEquals(appAttemptId.toString(), appReport.getHost());
+ Assert.assertEquals("test type", appReport.getApplicationType().toString());
+ Assert.assertEquals("test queue", appReport.getQueue().toString());
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
new file mode 100644
index 0000000..3720852
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.service.Service.STATE;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
+ .timeline.DefaultPhoenixDataSource;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
+ .timeline.PhoenixHBaseAccessor;
+import org.apache.zookeeper.ClientCnxn;
+import org.easymock.EasyMock;
+import org.junit.*;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.Statement;
+
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
+ .timeline.TimelineMetricConfiguration.METRICS_SITE_CONFIGURATION_FILE;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.*;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.junit.Assert.*;
+import static org.powermock.api.easymock.PowerMock.*;
+import static org.powermock.api.support.membermodification.MemberMatcher.method;
+import static org.powermock.api.support.membermodification.MemberModifier
+ .suppress;
+
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({ PhoenixHBaseAccessor.class, UserGroupInformation.class,
+ ClientCnxn.class, DefaultPhoenixDataSource.class})
+@PowerMockIgnore( {"javax.management.*"})
+public class TestApplicationHistoryServer {
+
+ ApplicationHistoryServer historyServer = null;
+ Configuration metricsConf = null;
+
+ @Rule
+ public TemporaryFolder folder = new TemporaryFolder();
+
+ @Before
+ @SuppressWarnings("all")
+ public void setup() throws URISyntaxException, IOException {
+ folder.create();
+ File hbaseSite = folder.newFile("hbase-site.xml");
+ File amsSite = folder.newFile("ams-site.xml");
+
+ FileUtils.writeStringToFile(hbaseSite, "<configuration>\n" +
+ " <property>\n" +
+ " <name>hbase.defaults.for.version.skip</name>\n" +
+ " <value>true</value>\n" +
+ " </property>" +
+ " <property> " +
+ " <name>hbase.zookeeper.quorum</name>\n" +
+ " <value>localhost</value>\n" +
+ " </property>" +
+ "</configuration>");
+
+ FileUtils.writeStringToFile(amsSite, "<configuration>\n" +
+ " <property>\n" +
+ " <name>test</name>\n" +
+ " <value>testReady</value>\n" +
+ " </property>\n" +
+ " <property>\n" +
+ " <name>timeline.metrics.host.aggregator.hourly.disabled</name>\n" +
+ " <value>true</value>\n" +
+ " <description>\n" +
+ " Disable host based hourly aggregations.\n" +
+ " </description>\n" +
+ " </property>\n" +
+ " <property>\n" +
+ " <name>timeline.metrics.host.aggregator.minute.disabled</name>\n" +
+ " <value>true</value>\n" +
+ " <description>\n" +
+ " Disable host based minute aggregations.\n" +
+ " </description>\n" +
+ " </property>\n" +
+ " <property>\n" +
+ " <name>timeline.metrics.cluster.aggregator.hourly.disabled</name>\n" +
+ " <value>true</value>\n" +
+ " <description>\n" +
+ " Disable cluster based hourly aggregations.\n" +
+ " </description>\n" +
+ " </property>\n" +
+ " <property>\n" +
+ " <name>timeline.metrics.cluster.aggregator.minute.disabled</name>\n" +
+ " <value>true</value>\n" +
+ " <description>\n" +
+ " Disable cluster based minute aggregations.\n" +
+ " </description>\n" +
+ " </property>" +
+ "</configuration>");
+
+ ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
+
+ // Add the conf dir to the classpath
+ // Chain the current thread classloader
+ URLClassLoader urlClassLoader = null;
+ try {
+ urlClassLoader = new URLClassLoader(new URL[] {
+ folder.getRoot().toURI().toURL() }, currentClassLoader);
+ } catch (MalformedURLException e) {
+ e.printStackTrace();
+ }
+
+ Thread.currentThread().setContextClassLoader(urlClassLoader);
+ metricsConf = new Configuration(false);
+ metricsConf.addResource(Thread.currentThread().getContextClassLoader()
+ .getResource(METRICS_SITE_CONFIGURATION_FILE).toURI().toURL());
+ assertNotNull(metricsConf.get("test"));
+ }
+
+ // simple test init/start/stop ApplicationHistoryServer. Status should change.
+ @Test(timeout = 50000)
+ public void testStartStopServer() throws Exception {
+ Configuration config = new YarnConfiguration();
+ UserGroupInformation ugi =
+ UserGroupInformation.createUserForTesting("ambari", new String[] {"ambari"});
+
+ mockStatic(UserGroupInformation.class);
+ expect(UserGroupInformation.getCurrentUser()).andReturn(ugi).anyTimes();
+ expect(UserGroupInformation.isSecurityEnabled()).andReturn(false).anyTimes();
+ config.set(YarnConfiguration.APPLICATION_HISTORY_STORE,
+ "org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore");
+
+ Connection connection = createNiceMock(Connection.class);
+ Statement stmt = createNiceMock(Statement.class);
+ mockStatic(DriverManager.class);
+ expect(DriverManager.getConnection("jdbc:phoenix:localhost:2181:/hbase"))
+ .andReturn(connection).anyTimes();
+ expect(connection.createStatement()).andReturn(stmt).anyTimes();
+ suppress(method(Statement.class, "executeUpdate", String.class));
+ connection.close();
+ expectLastCall();
+
+ EasyMock.replay(connection, stmt);
+ replayAll();
+
+ historyServer = new ApplicationHistoryServer();
+ historyServer.init(config);
+
+ verifyAll();
+
+ assertEquals(STATE.INITED, historyServer.getServiceState());
+ assertEquals(4, historyServer.getServices().size());
+ ApplicationHistoryClientService historyService =
+ historyServer.getClientService();
+ assertNotNull(historyServer.getClientService());
+ assertEquals(STATE.INITED, historyService.getServiceState());
+
+ historyServer.start();
+ assertEquals(STATE.STARTED, historyServer.getServiceState());
+ assertEquals(STATE.STARTED, historyService.getServiceState());
+ historyServer.stop();
+ assertEquals(STATE.STOPPED, historyServer.getServiceState());
+ }
+
+ // test launch method
+ @Ignore
+ @Test(timeout = 60000)
+ public void testLaunch() throws Exception {
+
+ UserGroupInformation ugi =
+ UserGroupInformation.createUserForTesting("ambari", new String[]{"ambari"});
+ mockStatic(UserGroupInformation.class);
+ expect(UserGroupInformation.getCurrentUser()).andReturn(ugi).anyTimes();
+ expect(UserGroupInformation.isSecurityEnabled()).andReturn(false).anyTimes();
+
+ ExitUtil.disableSystemExit();
+ try {
+ historyServer = ApplicationHistoryServer.launchAppHistoryServer(new String[0]);
+ } catch (ExitUtil.ExitException e) {
+ assertEquals(0, e.status);
+ ExitUtil.resetFirstExitException();
+ fail();
+ }
+ }
+
+ @After
+ public void stop() {
+ if (historyServer != null) {
+ historyServer.stop();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
new file mode 100644
index 0000000..bc16d36
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestFileSystemApplicationHistoryStore.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.IOException;
+import java.net.URI;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RawLocalFileSystem;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestFileSystemApplicationHistoryStore extends
+ ApplicationHistoryStoreTestUtils {
+
+ private FileSystem fs;
+ private Path fsWorkingPath;
+
+ @Before
+ public void setup() throws Exception {
+ fs = new RawLocalFileSystem();
+ Configuration conf = new Configuration();
+ fs.initialize(new URI("/"), conf);
+ fsWorkingPath = new Path("Test");
+ fs.delete(fsWorkingPath, true);
+ conf.set(YarnConfiguration.FS_APPLICATION_HISTORY_STORE_URI, fsWorkingPath.toString());
+ store = new FileSystemApplicationHistoryStore();
+ store.init(conf);
+ store.start();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ store.stop();
+ fs.delete(fsWorkingPath, true);
+ fs.close();
+ }
+
+ @Test
+ public void testReadWriteHistoryData() throws IOException {
+ testWriteHistoryData(5);
+ testReadHistoryData(5);
+ }
+
+ private void testWriteHistoryData(int num) throws IOException {
+ testWriteHistoryData(num, false, false);
+ }
+
+ private void testWriteHistoryData(
+ int num, boolean missingContainer, boolean missingApplicationAttempt)
+ throws IOException {
+ // write application history data
+ for (int i = 1; i <= num; ++i) {
+ ApplicationId appId = ApplicationId.newInstance(0, i);
+ writeApplicationStartData(appId);
+
+ // write application attempt history data
+ for (int j = 1; j <= num; ++j) {
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, j);
+ writeApplicationAttemptStartData(appAttemptId);
+
+ if (missingApplicationAttempt && j == num) {
+ continue;
+ }
+ // write container history data
+ for (int k = 1; k <= num; ++k) {
+ ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+ writeContainerStartData(containerId);
+ if (missingContainer && k == num) {
+ continue;
+ }
+ writeContainerFinishData(containerId);
+ }
+ writeApplicationAttemptFinishData(appAttemptId);
+ }
+ writeApplicationFinishData(appId);
+ }
+ }
+
+ private void testReadHistoryData(int num) throws IOException {
+ testReadHistoryData(num, false, false);
+ }
+
+ // Reads back what testWriteHistoryData stored and verifies it:
+ // 'num' applications, each with 'num' attempts, each attempt with
+ // 'num' containers. When missingApplicationAttempt/missingContainer is
+ // set, the last attempt/container of each group was written without a
+ // finish record, so its diagnostics info must come back null.
+ private void testReadHistoryData(
+ int num, boolean missingContainer, boolean missingApplicationAttempt)
+ throws IOException {
+ // read application history data
+ Assert.assertEquals(num, store.getAllApplications().size());
+ for (int i = 1; i <= num; ++i) {
+ ApplicationId appId = ApplicationId.newInstance(0, i);
+ ApplicationHistoryData appData = store.getApplication(appId);
+ Assert.assertNotNull(appData);
+ // the write helpers store appId.toString() as both name and diagnostics
+ Assert.assertEquals(appId.toString(), appData.getApplicationName());
+ Assert.assertEquals(appId.toString(), appData.getDiagnosticsInfo());
+
+ // read application attempt history data
+ Assert.assertEquals(num, store.getApplicationAttempts(appId).size());
+ for (int j = 1; j <= num; ++j) {
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, j);
+ ApplicationAttemptHistoryData attemptData =
+ store.getApplicationAttempt(appAttemptId);
+ Assert.assertNotNull(attemptData);
+ Assert.assertEquals(appAttemptId.toString(), attemptData.getHost());
+
+ // the last attempt has no finish record -> diagnostics must be null
+ if (missingApplicationAttempt && j == num) {
+ Assert.assertNull(attemptData.getDiagnosticsInfo());
+ continue;
+ } else {
+ Assert.assertEquals(appAttemptId.toString(),
+ attemptData.getDiagnosticsInfo());
+ }
+
+ // read container history data
+ Assert.assertEquals(num, store.getContainers(appAttemptId).size());
+ for (int k = 1; k <= num; ++k) {
+ ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+ ContainerHistoryData containerData = store.getContainer(containerId);
+ Assert.assertNotNull(containerData);
+ Assert.assertEquals(Priority.newInstance(containerId.getId()),
+ containerData.getPriority());
+ // the last container has no finish record -> diagnostics must be null
+ if (missingContainer && k == num) {
+ Assert.assertNull(containerData.getDiagnosticsInfo());
+ } else {
+ Assert.assertEquals(containerId.toString(),
+ containerData.getDiagnosticsInfo());
+ }
+ }
+ // container 1 of each attempt is reported as the AM (master) container
+ ContainerHistoryData masterContainer =
+ store.getAMContainer(appAttemptId);
+ Assert.assertNotNull(masterContainer);
+ Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
+ masterContainer.getContainerId());
+ }
+ }
+ }
+
+ // Once an application's finish record is stored, any further attempt or
+ // container write for it must be rejected with an IOException whose
+ // message contains "is not opened".
+ @Test
+ public void testWriteAfterApplicationFinish() throws IOException {
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ writeApplicationStartData(appId);
+ writeApplicationFinishData(appId);
+ // write application attempt history data
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ try {
+ writeApplicationAttemptStartData(appAttemptId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is not opened"));
+ }
+ try {
+ writeApplicationAttemptFinishData(appAttemptId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is not opened"));
+ }
+ // write container history data
+ ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+ try {
+ writeContainerStartData(containerId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is not opened"));
+ }
+ try {
+ writeContainerFinishData(containerId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is not opened"));
+ }
+ }
+
+ // Writes 100,000 container start/finish records under a single attempt
+ // and asserts the store's on-disk footprint (measured via
+ // FileSystem.getContentSummary on the working path) grows by < 20 MB.
+ @Test
+ public void testMassiveWriteContainerHistoryData() throws IOException {
+ long mb = 1024 * 1024;
+ long usedDiskBefore = fs.getContentSummary(fsWorkingPath).getLength() / mb;
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ writeApplicationStartData(appId);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ for (int i = 1; i <= 100000; ++i) {
+ ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
+ writeContainerStartData(containerId);
+ writeContainerFinishData(containerId);
+ }
+ writeApplicationFinishData(appId);
+ long usedDiskAfter = fs.getContentSummary(fsWorkingPath).getLength() / mb;
+ Assert.assertTrue((usedDiskAfter - usedDiskBefore) < 20);
+ }
+
+ // Round-trips 3x3x3 history data where the last container of each
+ // attempt is left without a finish record.
+ @Test
+ public void testMissingContainerHistoryData() throws IOException {
+ testWriteHistoryData(3, true, false);
+ testReadHistoryData(3, true, false);
+ }
+
+ // Round-trips 3x3x3 history data where the last attempt of each
+ // application is left without a finish record.
+ @Test
+ public void testMissingApplicationAttemptHistoryData() throws IOException {
+ testWriteHistoryData(3, false, true);
+ testReadHistoryData(3, false, true);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
new file mode 100644
index 0000000..7a45405
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
@@ -0,0 +1,204 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.IOException;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Tests for MemoryApplicationHistoryStore: out-of-order writes (finish
+ * before start), duplicate writes, normal read/write round-trips for
+ * applications, attempts, and containers, and a coarse memory-footprint
+ * check for a massive container write. Uses the write/read helpers
+ * inherited from ApplicationHistoryStoreTestUtils.
+ */
+public class TestMemoryApplicationHistoryStore extends
+ ApplicationHistoryStoreTestUtils {
+
+ // fresh in-memory store per test; 'store' is inherited from the utils base
+ @Before
+ public void setup() {
+ store = new MemoryApplicationHistoryStore();
+ }
+
+ @Test
+ public void testReadWriteApplicationHistory() throws Exception {
+ // Out of order: finish before start must be rejected
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ try {
+ writeApplicationFinishData(appId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains(
+ "is stored before the start information"));
+ }
+ // Normal: start+finish for several apps, then read them all back
+ int numApps = 5;
+ for (int i = 1; i <= numApps; ++i) {
+ appId = ApplicationId.newInstance(0, i);
+ writeApplicationStartData(appId);
+ writeApplicationFinishData(appId);
+ }
+ Assert.assertEquals(numApps, store.getAllApplications().size());
+ for (int i = 1; i <= numApps; ++i) {
+ appId = ApplicationId.newInstance(0, i);
+ ApplicationHistoryData data = store.getApplication(appId);
+ Assert.assertNotNull(data);
+ // the write helpers store appId.toString() as name and diagnostics
+ Assert.assertEquals(appId.toString(), data.getApplicationName());
+ Assert.assertEquals(appId.toString(), data.getDiagnosticsInfo());
+ }
+ // Write again: duplicate start/finish for a stored app must be rejected
+ appId = ApplicationId.newInstance(0, 1);
+ try {
+ writeApplicationStartData(appId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is already stored"));
+ }
+ try {
+ writeApplicationFinishData(appId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is already stored"));
+ }
+ }
+
+ @Test
+ public void testReadWriteApplicationAttemptHistory() throws Exception {
+ // Out of order: attempt finish before attempt start must be rejected
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ try {
+ writeApplicationAttemptFinishData(appAttemptId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains(
+ "is stored before the start information"));
+ }
+ // Normal: several attempts under one running application
+ int numAppAttempts = 5;
+ writeApplicationStartData(appId);
+ for (int i = 1; i <= numAppAttempts; ++i) {
+ appAttemptId = ApplicationAttemptId.newInstance(appId, i);
+ writeApplicationAttemptStartData(appAttemptId);
+ writeApplicationAttemptFinishData(appAttemptId);
+ }
+ Assert.assertEquals(numAppAttempts, store.getApplicationAttempts(appId)
+ .size());
+ for (int i = 1; i <= numAppAttempts; ++i) {
+ appAttemptId = ApplicationAttemptId.newInstance(appId, i);
+ ApplicationAttemptHistoryData data =
+ store.getApplicationAttempt(appAttemptId);
+ Assert.assertNotNull(data);
+ // the write helpers store appAttemptId.toString() as host/diagnostics
+ Assert.assertEquals(appAttemptId.toString(), data.getHost());
+ Assert.assertEquals(appAttemptId.toString(), data.getDiagnosticsInfo());
+ }
+ writeApplicationFinishData(appId);
+ // Write again: duplicates for a stored attempt must be rejected
+ appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
+ try {
+ writeApplicationAttemptStartData(appAttemptId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is already stored"));
+ }
+ try {
+ writeApplicationAttemptFinishData(appAttemptId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is already stored"));
+ }
+ }
+
+ @Test
+ public void testReadWriteContainerHistory() throws Exception {
+ // Out of order: container finish before container start must be rejected
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+ try {
+ writeContainerFinishData(containerId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains(
+ "is stored before the start information"));
+ }
+ // Normal: several containers under one open attempt
+ writeApplicationAttemptStartData(appAttemptId);
+ int numContainers = 5;
+ for (int i = 1; i <= numContainers; ++i) {
+ containerId = ContainerId.newInstance(appAttemptId, i);
+ writeContainerStartData(containerId);
+ writeContainerFinishData(containerId);
+ }
+ Assert
+ .assertEquals(numContainers, store.getContainers(appAttemptId).size());
+ for (int i = 1; i <= numContainers; ++i) {
+ containerId = ContainerId.newInstance(appAttemptId, i);
+ ContainerHistoryData data = store.getContainer(containerId);
+ Assert.assertNotNull(data);
+ // the write helpers set priority from the container id
+ Assert.assertEquals(Priority.newInstance(containerId.getId()),
+ data.getPriority());
+ Assert.assertEquals(containerId.toString(), data.getDiagnosticsInfo());
+ }
+ // container 1 is reported as the AM (master) container
+ ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
+ Assert.assertNotNull(masterContainer);
+ Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
+ masterContainer.getContainerId());
+ writeApplicationAttemptFinishData(appAttemptId);
+ // Write again: duplicates for a stored container must be rejected
+ containerId = ContainerId.newInstance(appAttemptId, 1);
+ try {
+ writeContainerStartData(containerId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is already stored"));
+ }
+ try {
+ writeContainerFinishData(containerId);
+ Assert.fail();
+ } catch (IOException e) {
+ Assert.assertTrue(e.getMessage().contains("is already stored"));
+ }
+ }
+
+ // Coarse memory check: 100,000 container records should grow the JVM's
+ // used heap by < 200 MB. NOTE(review): Runtime-based measurement is
+ // GC-sensitive and can be flaky under memory pressure.
+ @Test
+ public void testMassiveWriteContainerHistory() throws IOException {
+ long mb = 1024 * 1024;
+ Runtime runtime = Runtime.getRuntime();
+ long usedMemoryBefore = (runtime.totalMemory() - runtime.freeMemory()) / mb;
+ int numContainers = 100000;
+ ApplicationId appId = ApplicationId.newInstance(0, 1);
+ ApplicationAttemptId appAttemptId =
+ ApplicationAttemptId.newInstance(appId, 1);
+ for (int i = 1; i <= numContainers; ++i) {
+ ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
+ writeContainerStartData(containerId);
+ writeContainerFinishData(containerId);
+ }
+ long usedMemoryAfter = (runtime.totalMemory() - runtime.freeMemory()) / mb;
+ Assert.assertTrue((usedMemoryAfter - usedMemoryBefore) < 200);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestAppMetrics.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestAppMetrics.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestAppMetrics.java
new file mode 100644
index 0000000..499dab6
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestAppMetrics.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data;
+
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.Json;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Tests JSON serialization of AppMetrics from the metrics load simulator:
+ * a single host metric and a host with two metrics are serialized via
+ * Json(true) (pretty printing, judging by the expected multi-line output)
+ * and compared verbatim against the expected JSON literals below.
+ */
+public class TestAppMetrics {
+ // Expected pretty-printed JSON for one "disk_free" metric with four
+ // samples of 5.35 at 5-second intervals, starttime 1411663170112.
+ private static final String SAMPLE_SINGLE_METRIC_HOST_JSON = "{\n" +
+ " \"metrics\" : [ {\n" +
+ " \"instanceid\" : \"\",\n" +
+ " \"hostname\" : \"localhost\",\n" +
+ " \"metrics\" : {\n" +
+ " \"0\" : \"5.35\",\n" +
+ " \"5000\" : \"5.35\",\n" +
+ " \"10000\" : \"5.35\",\n" +
+ " \"15000\" : \"5.35\"\n" +
+ " },\n" +
+ " \"starttime\" : \"1411663170112\",\n" +
+ " \"appid\" : \"HOST\",\n" +
+ " \"metricname\" : \"disk_free\"\n" +
+ " } ]\n" +
+ "}";
+
+ // Expected JSON for the same host carrying both "disk_free" and
+ // "mem_cached" metrics, starttime 0.
+ private static final String SAMPLE_TWO_METRIC_HOST_JSON = "{\n" +
+ " \"metrics\" : [ {\n" +
+ " \"instanceid\" : \"\",\n" +
+ " \"hostname\" : \"localhost\",\n" +
+ " \"metrics\" : {\n" +
+ " \"0\" : \"5.35\",\n" +
+ " \"5000\" : \"5.35\",\n" +
+ " \"10000\" : \"5.35\",\n" +
+ " \"15000\" : \"5.35\"\n" +
+ " },\n" +
+ " \"starttime\" : \"0\",\n" +
+ " \"appid\" : \"HOST\",\n" +
+ " \"metricname\" : \"disk_free\"\n" +
+ " }, {\n" +
+ " \"instanceid\" : \"\",\n" +
+ " \"hostname\" : \"localhost\",\n" +
+ " \"metrics\" : {\n" +
+ " \"0\" : \"94.0\",\n" +
+ " \"5000\" : \"94.0\",\n" +
+ " \"10000\" : \"94.0\",\n" +
+ " \"15000\" : \"94.0\"\n" +
+ " },\n" +
+ " \"starttime\" : \"0\",\n" +
+ " \"appid\" : \"HOST\",\n" +
+ " \"metricname\" : \"mem_cached\"\n" +
+ " } ]\n" +
+ "}";
+
+ // Four sample timestamps, 5000 ms apart starting at 0; matches the
+ // "0"/"5000"/"10000"/"15000" keys in the expected JSON above.
+ private long[] timestamps;
+
+ @Before
+ public void setUp() throws Exception {
+ timestamps = new long[4];
+ timestamps[0] = 0;
+ timestamps[1] = timestamps[0] + 5000;
+ timestamps[2] = timestamps[1] + 5000;
+ timestamps[3] = timestamps[2] + 5000;
+
+ }
+
+ // One host, one metric: serialized output must match
+ // SAMPLE_SINGLE_METRIC_HOST_JSON exactly.
+ @Test
+ public void testHostDiskMetricsSerialization() throws IOException {
+ long timestamp = 1411663170112L;
+ AppMetrics appMetrics = new AppMetrics(new ApplicationInstance("localhost", AppID.HOST, ""), timestamp);
+
+ Metric diskFree = appMetrics.createMetric("disk_free");
+ double value = 5.35;
+
+ diskFree.putMetric(timestamps[0], Double.toString(value));
+ diskFree.putMetric(timestamps[1], Double.toString(value));
+ diskFree.putMetric(timestamps[2], Double.toString(value));
+ diskFree.putMetric(timestamps[3], Double.toString(value));
+
+ appMetrics.addMetric(diskFree);
+
+ String expected = SAMPLE_SINGLE_METRIC_HOST_JSON;
+ String s = new Json(true).serialize(appMetrics);
+
+ assertEquals("Serialized Host Metrics", expected, s);
+ }
+
+
+ // One host, two metrics: serialized output must match
+ // SAMPLE_TWO_METRIC_HOST_JSON exactly (metrics appear in insertion order).
+ @Test
+ public void testSingleHostManyMetricsSerialization() throws IOException {
+ AppMetrics appMetrics = new AppMetrics(new ApplicationInstance("localhost", AppID.HOST, ""), timestamps[0]);
+
+ Metric diskFree = appMetrics.createMetric("disk_free");
+ double value = 5.35;
+ diskFree.putMetric(timestamps[0], Double.toString(value));
+ diskFree.putMetric(timestamps[1], Double.toString(value));
+ diskFree.putMetric(timestamps[2], Double.toString(value));
+ diskFree.putMetric(timestamps[3], Double.toString(value));
+
+ appMetrics.addMetric(diskFree);
+
+ Metric memCache = appMetrics.createMetric("mem_cached");
+ double memVal = 94;
+ memCache.putMetric(timestamps[0], Double.toString(memVal));
+ memCache.putMetric(timestamps[1], Double.toString(memVal));
+ memCache.putMetric(timestamps[2], Double.toString(memVal));
+ memCache.putMetric(timestamps[3], Double.toString(memVal));
+
+ appMetrics.addMetric(memCache);
+
+ String expected = SAMPLE_TWO_METRIC_HOST_JSON;
+ String s = new Json(true).serialize(appMetrics);
+
+ assertEquals("Serialized Host Metrics", expected, s);
+ }
+}
\ No newline at end of file