Posted to commits@ambari.apache.org by mp...@apache.org on 2014/11/28 19:04:27 UTC

ambari git commit: AMBARI-8471. Cleanup unnecessary dep jars from ambari-metrics rpm. (mpapirkovskyy)

Repository: ambari
Updated Branches:
  refs/heads/branch-metrics-dev d6cea463e -> ba45a3f66


AMBARI-8471. Cleanup unnecessary dep jars from ambari-metrics rpm. (mpapirkovskyy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ba45a3f6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ba45a3f6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ba45a3f6

Branch: refs/heads/branch-metrics-dev
Commit: ba45a3f663efe3e25b31d4b767b1ab32dcabe908
Parents: d6cea46
Author: Myroslav Papirkovskyy <mp...@hortonworks.com>
Authored: Fri Nov 28 20:04:14 2014 +0200
Committer: Myroslav Papirkovskyy <mp...@hortonworks.com>
Committed: Fri Nov 28 20:04:14 2014 +0200

----------------------------------------------------------------------
 .../ambari-metrics-hadoop-sink/pom.xml          |  23 +---
 .../conf/unix/ambari-metrics-collector          |   2 +-
 .../conf/unix/log4j.properties                  |  31 +++++
 .../pom.xml                                     |  25 +++-
 .../TimelineMetricAggregatorFactory.java        |  18 +--
 .../ambari-metrics-host-monitoring/pom.xml      |   4 +
 .../src/main/package/rpm/preremove.sh           |  28 ++++
 ambari-metrics/pom.xml                          |   2 +-
 .../services/HBASE/package/scripts/params.py    |   1 +
 .../services/HDFS/configuration/hadoop-env.xml  |   3 -
 .../AMS/configuration/ams-hbase-env.xml         |   4 +
 .../AMS/configuration/ams-hbase-site.xml        |  22 +++
 .../services/AMS/configuration/ams-log4j.xml    |  59 ++++++++
 .../stacks/HDP/2.2/services/AMS/metainfo.xml    |   1 +
 .../HDP/2.2/services/AMS/package/scripts/ams.py |  16 +++
 .../2.2/services/AMS/package/scripts/hbase.py   |   4 +-
 .../2.2/services/AMS/package/scripts/params.py  |   9 +-
 .../services/HBASE/configuration/hbase-env.xml  | 137 +++++++++++++++++++
 .../services/HDFS/configuration/hadoop-env.xml  | 128 ++++++++++++++++-
 19 files changed, 478 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml b/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
index 7fd898c..0397e2e 100644
--- a/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
+++ b/ambari-metrics/ambari-metrics-hadoop-sink/pom.xml
@@ -31,20 +31,6 @@ limitations under the License.
   <build>
     <plugins>
       <plugin>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>copy-dependencies</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/lib</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
         <artifactId>maven-assembly-plugin</artifactId>
         <configuration>
           <descriptors>
@@ -123,7 +109,7 @@ limitations under the License.
           <description>Maven Recipe: RPM Package.</description>
           <mappings>
             <mapping>
-              <directory>/usr/lib/hadoop/lib</directory>
+              <directory>/usr/lib/ambari-metrics-hadoop-sink</directory>
               <filemode>644</filemode>
               <username>root</username>
               <groupname>root</groupname>
@@ -131,9 +117,10 @@ limitations under the License.
                 <source>
                   <location>target/${project.artifactId}-${project.version}.jar</location>
                 </source>
-                <source>
-                  <location>target/lib</location>
-                </source>
+                <softlinkSource>
+                  <destination>ambari-metrics-hadoop-sink.jar</destination>
+                  <location>/usr/lib/ambari-metrics-hadoop-sink/${project.artifactId}-${project.version}.jar</location>
+                </softlinkSource>
               </sources>
 
             </mapping>

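The mapping above stops bundling copied dependency jars and instead installs only the versioned sink jar under /usr/lib/ambari-metrics-hadoop-sink, together with a stable softlink named ambari-metrics-hadoop-sink.jar pointing at it. A minimal standalone sketch (not part of the commit, path taken from the mapping above) of how a consumer could probe for that stable link before appending it to a classpath:

    import java.io.File;

    // Sketch only: checks for the stable symlink the RPM mapping creates.
    public class SinkJarCheck {
      public static void main(String[] args) {
        File sinkJar = new File("/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar");
        // The env templates later in this commit do the equivalent test in shell
        // before appending the jar to HADOOP_CLASSPATH / HBASE_CLASSPATH.
        System.out.println(sinkJar.exists() ? "sink jar present" : "sink jar missing");
      }
    }
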
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/ambari-metrics-collector
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/ambari-metrics-collector b/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/ambari-metrics-collector
index b6e17cf..9aabbdc 100644
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/ambari-metrics-collector
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/ambari-metrics-collector
@@ -27,7 +27,7 @@ HBASE_DIR=/usr/lib/ams-hbase
 DAEMON_NAME=timelineserver
 
 COLLECTOR_CONF_DIR=/etc/ambari-metrics-collector/conf
-HBASE_CONF_DIR=${COLLECTOR_CONF_DIR}
+HBASE_CONF_DIR=/etc/ams-hbase/conf
 
 METRIC_COLLECTOR=ambari-metrics-collector
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/log4j.properties
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/log4j.properties b/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/log4j.properties
new file mode 100644
index 0000000..8a9e2c8
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/conf/unix/log4j.properties
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Define some default values that can be overridden by system properties
+# Root logger option
+log4j.rootLogger=INFO,file
+
+# Direct log messages to a log file
+log4j.appender.file=org.apache.log4j.RollingFileAppender
+log4j.appender.file.File=/var/log/ambari-metrics-collector/ambari-metrics-collector.log
+log4j.appender.file.MaxFileSize=80MB
+log4j.appender.file.MaxBackupIndex=60
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p [%t] %c{1}:%L - %m%n
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-metrics/ambari-metrics-hadoop-timelineservice/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/pom.xml b/ambari-metrics/ambari-metrics-hadoop-timelineservice/pom.xml
index ae2872d..6115d7b 100644
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/pom.xml
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/pom.xml
@@ -152,6 +152,7 @@
                 <source>
                   <location>target/embedded/${hbase.folder}</location>
                   <excludes>
+                    <exclude>bin/**</exclude>
                     <exclude>bin/*</exclude>
                   </excludes>
                 </source>
@@ -187,17 +188,39 @@
               <sources>
                 <source>
                   <location>conf/unix/ambari-metrics-collector</location>
-                  <filter>true</filter>
+                  <filter>false</filter>
                 </source>
               </sources>
             </mapping>
             <mapping>
               <directory>/etc/ambari-metrics-collector/conf</directory>
               <configuration>true</configuration>
+              <sources>
+                <source>
+                  <location>conf/unix/ams-env.sh</location>
+                </source>
+                <source>
+                  <location>conf/unix/ams-site.xml</location>
+                </source>
+                <source>
+                  <location>conf/unix/log4j.properties</location>
+                </source>
+                <source>
+                  <location>target/embedded/${hbase.folder}/conf/hbase-site.xml</location>
+                </source>
+              </sources>
             </mapping>
             <mapping>
               <directory>/etc/ams-hbase/conf</directory>
               <configuration>true</configuration>
+              <sources>
+                <source>
+                  <location>target/embedded/${hbase.folder}/conf</location>
+                  <includes>
+                    <include>*.*</include>
+                  </includes>
+                </source>
+              </sources>
             </mapping>
             <mapping>
               <directory>/var/run/ams-hbase</directory>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricAggregatorFactory.java b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricAggregatorFactory.java
index e8b7dfc..8b10079 100644
--- a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricAggregatorFactory.java
+++ b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricAggregatorFactory.java
@@ -15,9 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .timeline;
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
+import static java.util.concurrent.TimeUnit.SECONDS;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.hadoop.conf.Configuration;
 import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
@@ -48,8 +48,9 @@ public class TimelineMetricAggregatorFactory {
       TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR, DEFAULT_CHECKPOINT_LOCATION);
     String checkpointLocation = FilenameUtils.concat(checkpointDir,
       MINUTE_AGGREGATE_CHECKPOINT_FILE);
-    long sleepInterval = metricsConf.getLong
-      (HOST_AGGREGATOR_MINUTE_SLEEP_INTERVAL, 300000l);  // 5 mins
+    long sleepIntervalMillis = SECONDS.toMillis(metricsConf.getLong
+      (HOST_AGGREGATOR_MINUTE_SLEEP_INTERVAL, 300l));  // 5 mins
+
     int checkpointCutOffMultiplier = metricsConf.getInt
       (HOST_AGGREGATOR_MINUTE_CHECKPOINT_CUTOFF_MULTIPLIER, 3);
     String hostAggregatorDisabledParam = HOST_AGGREGATOR_MINUTE_DISABLED;
@@ -59,7 +60,7 @@ public class TimelineMetricAggregatorFactory {
 
     return new TimelineMetricAggregator(hBaseAccessor, metricsConf,
       checkpointLocation,
-      sleepInterval,
+      sleepIntervalMillis,
       checkpointCutOffMultiplier,
       hostAggregatorDisabledParam,
       inputTableName,
@@ -74,8 +75,9 @@ public class TimelineMetricAggregatorFactory {
       TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR, DEFAULT_CHECKPOINT_LOCATION);
     String checkpointLocation = FilenameUtils.concat(checkpointDir,
       MINUTE_AGGREGATE_HOURLY_CHECKPOINT_FILE);
-    long sleepInterval = metricsConf.getLong
-      (HOST_AGGREGATOR_HOUR_SLEEP_INTERVAL, 3600000l);
+    long sleepIntervalMillis = SECONDS.toMillis(metricsConf.getLong
+      (HOST_AGGREGATOR_HOUR_SLEEP_INTERVAL, 3600l));
+
     int checkpointCutOffMultiplier = metricsConf.getInt
       (HOST_AGGREGATOR_HOUR_CHECKPOINT_CUTOFF_MULTIPLIER, 2);
     String hostAggregatorDisabledParam = HOST_AGGREGATOR_HOUR_DISABLED;
@@ -85,7 +87,7 @@ public class TimelineMetricAggregatorFactory {
 
     return new TimelineMetricAggregator(hBaseAccessor, metricsConf,
       checkpointLocation,
-      sleepInterval,
+      sleepIntervalMillis,
       checkpointCutOffMultiplier,
       hostAggregatorDisabledParam,
       inputTableName,

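The hunks above switch the aggregator sleep-interval settings from milliseconds to seconds: the defaults drop from 300000l/3600000l to 300l/3600l, and the configured value is converted with SECONDS.toMillis before being handed to TimelineMetricAggregator. A minimal standalone sketch (not the project's code, hypothetical configured value) of that conversion:

    import static java.util.concurrent.TimeUnit.SECONDS;

    // Sketch only: the interval is now configured in seconds and converted to millis.
    public class SleepIntervalSketch {
      public static void main(String[] args) {
        long configuredSeconds = 300L;                              // e.g. minute aggregator default
        long sleepIntervalMillis = SECONDS.toMillis(configuredSeconds);
        System.out.println(sleepIntervalMillis);                    // prints 300000
      }
    }
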
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/pom.xml b/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
index 8c4d112..b6e44ef 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
+++ b/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
@@ -124,6 +124,10 @@
             <require>gcc</require>
             <require>python-devel</require>
           </requires>
+          <preremoveScriptlet>
+            <scriptFile>src/main/package/rpm/preremove.sh</scriptFile>
+            <fileEncoding>utf-8</fileEncoding>
+          </preremoveScriptlet>
           <mappings>
             <mapping>
               <directory>${resmonitor.install.dir}</directory>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-metrics/ambari-metrics-host-monitoring/src/main/package/rpm/preremove.sh
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/package/rpm/preremove.sh b/ambari-metrics/ambari-metrics-host-monitoring/src/main/package/rpm/preremove.sh
new file mode 100644
index 0000000..9789127
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/package/rpm/preremove.sh
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+# WARNING: This script is performed not only on uninstall, but also
+# during package update. See http://www.ibm.com/developerworks/library/l-rpm2/
+# for details
+
+RESOURCE_MONITORING_DIR=/usr/lib/python2.6/site-packages/resource_monitoring
+PSUTIL_DIR="${RESOURCE_MONITORING_DIR}/psutil"
+
+
+if [ -d "${PSUTIL_DIR}" ]; then
+  rm -rf "${PSUTIL_DIR}/*"
+fi
+
+exit 0
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-metrics/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/pom.xml b/ambari-metrics/pom.xml
index 2ee4c1e..257eade 100644
--- a/ambari-metrics/pom.xml
+++ b/ambari-metrics/pom.xml
@@ -35,7 +35,7 @@
     <python.ver>python &gt;= 2.6</python.ver>
     <deb.python.ver>python (&gt;= 2.6)</deb.python.ver>
     <!--TODO change to HDP URL-->
-    <hbase.tar>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0/tars/hbase-0.98.4.2.2.0.0-2041-hadoop2.tar.gz</hbase.tar>
+    <hbase.tar>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/GA/2.2.0.0/tars/hbase-0.98.4.2.2.0.0-2041-hadoop2.tar.gz</hbase.tar>
     <hbase.folder>hbase-0.98.4.2.2.0.0-2041-hadoop2</hbase.folder>
   </properties>
   <repositories>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
index 55130cf..d4c271f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
@@ -84,6 +84,7 @@ master_jaas_config_file = format("{hbase_conf_dir}/hbase_master_jaas.conf")
 regionserver_jaas_config_file = format("{hbase_conf_dir}/hbase_regionserver_jaas.conf")
 
 ganglia_server_hosts = default('/clusterHostInfo/ganglia_server_host', []) # is not passed when ganglia is not present
+has_ganglia_server = not len(ganglia_server_hosts) == 0
 ganglia_server_host = '' if len(ganglia_server_hosts) == 0 else ganglia_server_hosts[0]
 
 ams_collector_hosts = default("/clusterHostInfo/metric_collector_hosts", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hadoop-env.xml
index 41274c7..b3935d7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/configuration/hadoop-env.xml
@@ -184,9 +184,6 @@ if [ -d "/usr/lib/tez" ]; then
   export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/tez/*:/usr/lib/tez/lib/*:/etc/tez/conf
 fi
 
-#TODO temporary addition
-export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/hadoop/lib/*
-
 # Setting path to hdfs command line
 export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml
index e0015ea..7a61c60 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml
@@ -74,6 +74,10 @@
       # Extra Java CLASSPATH elements. Optional.
       export HBASE_CLASSPATH=${HBASE_CLASSPATH}
 
+      if [ -f "/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar" ]; then
+        export HBASE_CLASSPATH=${HBASE_CLASSPATH}:/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+      fi
+
       # The maximum amount of heap to use, in MB. Default is 1000.
       # export HBASE_HEAPSIZE=1000
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
index 8446725..eb07685 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
@@ -91,6 +91,16 @@
     <description>The port for the HBase RegionServer web UI.</description>
   </property>
   <property>
+    <name>hbase.master.port</name>
+    <value>61300</value>
+    <description>The port for the HBase Master web UI.</description>
+  </property>
+  <property>
+    <name>hbase.regionserver.port</name>
+    <value>61320</value>
+    <description>The port for the HBase RegionServer web UI.</description>
+  </property>
+  <property>
     <name>hbase.hregion.majorcompaction</name>
     <value>0</value>
     <description>
@@ -205,6 +215,18 @@
     </description>
   </property>
   <property>
+    <name>hbase.zookeeper.property.clientPort</name>
+    <value>61181</value>
+  </property>
+  <property>
+    <name>hbase.zookeeper.peerport</name>
+    <value>61288</value>
+  </property>
+  <property>
+    <name>hbase.zookeeper.leaderport</name>
+    <value>61388</value>
+  </property>
+  <property>
     <name>hbase.hstore.blockingStoreFiles</name>
     <value>200</value>
     <description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-log4j.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-log4j.xml
new file mode 100644
index 0000000..3f2e148
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-log4j.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<configuration supports_final="false">
+
+  <property>
+    <name>content</name>
+    <description>Custom log4j.properties</description>
+    <value>
+      #
+      # Licensed to the Apache Software Foundation (ASF) under one
+      # or more contributor license agreements.  See the NOTICE file
+      # distributed with this work for additional information
+      # regarding copyright ownership.  The ASF licenses this file
+      # to you under the Apache License, Version 2.0 (the
+      # "License"); you may not use this file except in compliance
+      # with the License.  You may obtain a copy of the License at
+      #
+      #     http://www.apache.org/licenses/LICENSE-2.0
+      #
+      # Unless required by applicable law or agreed to in writing, software
+      # distributed under the License is distributed on an "AS IS" BASIS,
+      # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+      # See the License for the specific language governing permissions and
+      # limitations under the License.
+      #
+
+      # Define some default values that can be overridden by system properties
+      # Root logger option
+      log4j.rootLogger=INFO,file
+
+      # Direct log messages to a log file
+      log4j.appender.file=org.apache.log4j.RollingFileAppender
+      log4j.appender.file.File=/var/log/ambari-metrics-collector/ambari-metrics-collector.log
+      log4j.appender.file.MaxFileSize=80MB
+      log4j.appender.file.MaxBackupIndex=60
+      log4j.appender.file.layout=org.apache.log4j.PatternLayout
+      log4j.appender.file.layout.ConversionPattern=%d{ABSOLUTE} %5p [%t] %c{1}:%L - %m%n
+    </value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/metainfo.xml
index 2839387..51d8177 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/metainfo.xml
@@ -92,6 +92,7 @@
 
       <configuration-dependencies>
         <config-type>ams-site</config-type>
+        <config-type>ams-log4j</config-type>
         <config-type>ams-hbase-policy</config-type>
         <config-type>ams-hbase-site</config-type>
         <config-type>ams-hbase-env</config-type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/ams.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/ams.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/ams.py
index 2e5ce35..1f09d7d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/ams.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/ams.py
@@ -39,6 +39,22 @@ def ams(name=None):
               group=params.user_group
     )
 
+    XmlConfig( "hbase-site.xml",
+               conf_dir = params.ams_collector_conf_dir,
+               configurations = params.config['configurations']['ams-hbase-site'],
+               configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'],
+               owner = params.ams_user,
+               group = params.user_group
+    )
+
+    if (params.log4j_props != None):
+      File(format("{params.ams_collector_conf_dir}/log4j.properties"),
+           mode=0644,
+           group=params.user_group,
+           owner=params.ams_user,
+           content=params.log4j_props
+      )
+
     File(format("{ams_collector_conf_dir}/ams-env.sh"),
          owner=params.ams_user,
          content=InlineTemplate(params.ams_env_sh_template)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/hbase.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/hbase.py
index c21ab4c..d861338 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/hbase.py
@@ -99,12 +99,12 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
       recursive = True
     )
 
-  if (params.log4j_props != None):
+  if (params.hbase_log4j_props != None):
     File(format("{params.hbase_conf_dir}/log4j.properties"),
          mode=0644,
          group=params.user_group,
          owner=params.hbase_user,
-         content=params.log4j_props
+         content=params.hbase_log4j_props
     )
   elif (os.path.exists(format("{params.hbase_conf_dir}/log4j.properties"))):
     File(format("{params.hbase_conf_dir}/log4j.properties"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/params.py
index 6488a79..278bb90 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/scripts/params.py
@@ -69,7 +69,7 @@ else:
 
 hadoop_conf_dir = "/etc/hadoop/conf"
 #hbase_conf_dir = "/etc/ams-hbase/conf"
-hbase_conf_dir = ams_collector_conf_dir
+hbase_conf_dir = "/etc/ams-hbase/conf"
 hbase_excluded_hosts = config['commandParams']['excluded_hosts']
 hbase_drain_only = config['commandParams']['mark_draining_only']
 hbase_included_hosts = config['commandParams']['included_hosts']
@@ -137,7 +137,12 @@ kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/
 
 #log4j.properties
 if (('ams-hbase-log4j' in config['configurations']) and ('content' in config['configurations']['ams-hbase-log4j'])):
-  log4j_props = config['configurations']['ams-hbase-log4j']['content']
+  hbase_log4j_props = config['configurations']['ams-hbase-log4j']['content']
+else:
+  hbase_log4j_props = None
+
+if (('ams-log4j' in config['configurations']) and ('content' in config['configurations']['ams-log4j'])):
+  log4j_props = config['configurations']['ams-log4j']['content']
 else:
   log4j_props = None
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml
new file mode 100644
index 0000000..0fc6f16
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/configuration/hbase-env.xml
@@ -0,0 +1,137 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<configuration>
+  <property>
+    <name>hbase_log_dir</name>
+    <value>/var/log/hbase</value>
+    <description>Log Directories for HBase.</description>
+  </property>
+  <property>
+    <name>hbase_pid_dir</name>
+    <value>/var/run/hbase</value>
+    <description>Pid Directory for HBase.</description>
+  </property>
+  <property>
+    <name>hbase_regionserver_heapsize</name>
+    <value>1024</value>
+    <description>HBase RegionServer Heap Size.</description>
+  </property>
+  <property>
+    <name>hbase_regionserver_xmn_max</name>
+    <value>512</value>
+    <description>HBase RegionServer maximum value for minimum heap size.</description>
+  </property>
+  <property>
+    <name>hbase_regionserver_xmn_ratio</name>
+    <value>0.2</value>
+    <description>HBase RegionServer minimum heap size is calculated as a percentage of max heap size.</description>
+  </property>
+  <property>
+    <name>hbase_master_heapsize</name>
+    <value>1024</value>
+    <description>HBase Master Heap Size</description>
+  </property>
+   <property>
+    <name>hbase_user</name>
+    <value>hbase</value>
+    <property-type>USER</property-type>
+    <description>HBase User Name.</description>
+  </property>
+
+  <!-- hbase-env.sh -->
+  <property>
+    <name>content</name>
+    <description>This is the jinja template for hbase-env.sh file</description>
+    <value>
+# Set environment variables here.
+
+# The java implementation to use. Java 1.6 required.
+export JAVA_HOME={{java64_home}}
+
+# HBase Configuration directory
+export HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}
+
+# Extra Java CLASSPATH elements. Optional.
+export HBASE_CLASSPATH=${HBASE_CLASSPATH}
+
+if [ -f "/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar" ]; then
+  export HBASE_CLASSPATH=${HBASE_CLASSPATH}:/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+fi
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+# export HBASE_HEAPSIZE=1000
+
+# Extra Java runtime options.
+# Below are what we set by default. May only work with SUN JVM.
+# For more on why as well as other possible settings,
+# see http://wiki.apache.org/hadoop/PerformanceTuning
+export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`"
+# Uncomment below to enable java garbage collection logging.
+# export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log"
+
+# Uncomment and adjust to enable JMX exporting
+# See jmxremote.password and jmxremote.access in $JRE_HOME/lib/management to configure remote password access.
+# More details at: http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html
+#
+# export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false"
+# If you want to configure BucketCache, specify '-XX: MaxDirectMemorySize=' with proper direct memory size
+# export HBASE_THRIFT_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10103"
+# export HBASE_ZOOKEEPER_OPTS="$HBASE_JMX_BASE -Dcom.sun.management.jmxremote.port=10104"
+
+# File naming hosts on which HRegionServers will run. $HBASE_HOME/conf/regionservers by default.
+export HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers
+
+# Extra ssh options. Empty by default.
+# export HBASE_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR"
+
+# Where log files are stored. $HBASE_HOME/logs by default.
+export HBASE_LOG_DIR={{log_dir}}
+
+# A string representing this instance of hbase. $USER by default.
+# export HBASE_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes. See 'man nice'.
+# export HBASE_NICENESS=10
+
+# The directory where pid files are stored. /tmp by default.
+export HBASE_PID_DIR={{pid_dir}}
+
+# Seconds to sleep between slave commands. Unset by default. This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HBASE_SLAVE_SLEEP=0.1
+
+# Tell HBase whether it should manage it's own instance of Zookeeper or not.
+export HBASE_MANAGES_ZK=false
+
+{% if security_enabled %}
+export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}}"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}}"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}}"
+{% else %}
+export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}}"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}}"
+{% endif %}
+    </value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba45a3f6/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 8907098..ba9b3b2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -21,9 +21,131 @@
 -->
 
 <configuration>
+  <!-- hadoop-env.sh -->
   <property>
-    <name>rpm_version</name>
-    <value>2.9.9.9</value>
-    <description>Hadoop RPM version</description>
+    <name>content</name>
+    <description>This is the jinja template for hadoop-env.sh file</description>
+    <value>
+      # Set Hadoop-specific environment variables here.
+
+      # The only required environment variable is JAVA_HOME.  All others are
+      # optional.  When running a distributed configuration it is best to
+      # set JAVA_HOME in this file, so that it is correctly defined on
+      # remote nodes.
+
+      # The java implementation to use.  Required.
+      export JAVA_HOME={{java_home}}
+      export HADOOP_HOME_WARN_SUPPRESS=1
+
+      # Hadoop home directory
+      export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+
+      # Hadoop Configuration Directory
+
+      {# this is different for HDP1 #}
+      # Path to jsvc required by secure HDP 2.0 datanode
+      export JSVC_HOME={{jsvc_path}}
+
+
+      # The maximum amount of heap to use, in MB. Default is 1000.
+      export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
+
+      export HADOOP_NAMENODE_INIT_HEAPSIZE="-Xms{{namenode_heapsize}}"
+
+      # Extra Java runtime options.  Empty by default.
+      export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}"
+
+      # Command specific options appended to HADOOP_OPTS when specified
+      export HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} -XX:PermSize={{namenode_opt_permsize}} -XX:MaxPermSize={{namenode_opt_maxpermsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
+      HADOOP_JOBTRACKER_OPTS="-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC -XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log -XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} -Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}"
+
+      HADOOP_TASKTRACKER_OPTS="-server -Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
+      export HADOOP_DATANODE_OPTS="-server -XX:ParallelGCThreads=4 -XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log -XX:NewSize=200m -XX:MaxNewSize=200m -XX:PermSize=128m -XX:MaxPermSize=256m -Xloggc:/var/log/hadoop/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xms{{dtnode_heapsize}} -Xmx{{dtnode_heapsize}} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_DATANODE_OPTS}"
+      HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
+
+      export HADOOP_SECONDARYNAMENODE_OPTS=$HADOOP_NAMENODE_OPTS
+
+      # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+      export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m -XX:MaxPermSize=512m $HADOOP_CLIENT_OPTS"
+
+      # On secure datanodes, user to run the datanode as after dropping privileges
+      export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER:-{{hadoop_secure_dn_user}}}
+
+      # Extra ssh options.  Empty by default.
+      export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"
+
+      # Where log files are stored.  $HADOOP_HOME/logs by default.
+      export HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER
+
+      # History server logs
+      export HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER
+
+      # Where log files are stored in the secure data environment.
+      export HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER
+
+      # File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
+      # export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+      # host:path where hadoop code should be rsync'd from.  Unset by default.
+      # export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+      # Seconds to sleep between slave commands.  Unset by default.  This
+      # can be useful in large clusters, where, e.g., slave rsyncs can
+      # otherwise arrive faster than the master can service them.
+      # export HADOOP_SLAVE_SLEEP=0.1
+
+      # The directory where pid files are stored. /tmp by default.
+      export HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER
+      export HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER
+
+      # History server pid
+      export HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER
+
+      YARN_RESOURCEMANAGER_OPTS="-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY"
+
+      # A string representing this instance of hadoop. $USER by default.
+      export HADOOP_IDENT_STRING=$USER
+
+      # The scheduling priority for daemon processes.  See 'man nice'.
+
+      # export HADOOP_NICENESS=10
+
+      # Use libraries from standard classpath
+      JAVA_JDBC_LIBS=""
+      #Add libraries required by mysql connector
+      for jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`
+      do
+      JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+      done
+      # Add libraries required by oracle connector
+      for jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`
+      do
+      JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile
+      done
+      # Add libraries required by nodemanager
+      MAPREDUCE_LIBS={{mapreduce_libs_path}}
+      export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}
+
+      # added to the HADOOP_CLASSPATH
+      if [ -d "/usr/hdp/current/tez-client" ]; then
+      if [ -d "/etc/tez/conf/" ]; then
+      # When using versioned RPMs, the tez-client will be a symlink to the current folder of tez in HDP.
+      export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:/usr/hdp/current/tez-client/*:/usr/hdp/current/tez-client/lib/*:/etc/tez/conf/
+      fi
+      fi
+
+      if [ -f "/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar" ]; then
+        export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+      fi
+
+      # Setting path to hdfs command line
+      export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
+
+      # Mostly required for hadoop 2.0
+      export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}
+
+      export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
+    </value>
   </property>
+
 </configuration>