Posted to commits@ambari.apache.org by vb...@apache.org on 2015/03/20 15:52:49 UTC

ambari git commit: AMBARI-9994. Add Mahout to HDP Stack. (vbrodetskyi)

Repository: ambari
Updated Branches:
  refs/heads/trunk 91926434a -> ea8e32ff1


AMBARI-9994. Add Mahout to HDP Stack. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ea8e32ff
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ea8e32ff
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ea8e32ff

Branch: refs/heads/trunk
Commit: ea8e32ff1e9501f7cf4254e603959f676fdbf932
Parents: 9192643
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Mar 20 16:51:50 2015 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Mar 20 16:51:50 2015 +0200

----------------------------------------------------------------------
 .../java/org/apache/ambari/server/Role.java     |   2 +
 .../ambari/server/metadata/ActionMetadata.java  |   1 +
 .../1.0.0.2.3/configuration/mahout-env.xml      |  30 +++
 .../MAHOUT/1.0.0.2.3/metainfo.xml               |  68 +++++
 .../MAHOUT/1.0.0.2.3/package/scripts/mahout.py  |  34 +++
 .../1.0.0.2.3/package/scripts/mahout_client.py  |  52 ++++
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  71 +++++
 .../1.0.0.2.3/package/scripts/service_check.py  |  93 +++++++
 .../stacks/HDP/2.3/role_command_order.json      |   8 +
 .../stacks/HDP/2.3/services/MAHOUT/metainfo.xml |  26 ++
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |  43 +++
 .../2.3/MAHOUT/test_mahout_service_check.py     |  86 ++++++
 .../test/python/stacks/2.3/configs/default.json | 261 +++++++++++++++++++
 13 files changed, 775 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/java/org/apache/ambari/server/Role.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/Role.java b/ambari-server/src/main/java/org/apache/ambari/server/Role.java
index 59ae001..32c025c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/Role.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/Role.java
@@ -80,6 +80,8 @@ public class Role {
   public static final Role PEERSTATUS = valueOf("PEERSTATUS");
   public static final Role PIG = valueOf("PIG");
   public static final Role PIG_SERVICE_CHECK = valueOf("PIG_SERVICE_CHECK");
+  public static final Role MAHOUT = valueOf("MAHOUT");
+  public static final Role MAHOUT_SERVICE_CHECK = valueOf("MAHOUT_SERVICE_CHECK");
   public static final Role RESOURCEMANAGER = valueOf("RESOURCEMANAGER");
   public static final Role SECONDARY_NAMENODE = valueOf("SECONDARY_NAMENODE");
   public static final Role SQOOP = valueOf("SQOOP");

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java b/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
index b9d2d0c..587f806 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
@@ -68,6 +68,7 @@ public class ActionMetadata {
     serviceClients.put("hcat"       , Role.HCAT.toString());
     serviceClients.put("oozie"      , Role.OOZIE_CLIENT.toString());
     serviceClients.put("pig"        , Role.PIG.toString());
+    serviceClients.put("mahout"     , Role.MAHOUT.toString());
     serviceClients.put("sqoop"      , Role.SQOOP.toString());
     serviceClients.put("yarn"       , Role.YARN_CLIENT.toString());
     serviceClients.put("kerberos"   , Role.KERBEROS_CLIENT.toString());

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/configuration/mahout-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/configuration/mahout-env.xml b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/configuration/mahout-env.xml
new file mode 100644
index 0000000..da19a0b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/configuration/mahout-env.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+    <property>
+        <name>mahout_user</name>
+        <value>mahout</value>
+        <property-type>USER</property-type>
+        <description>Mahout user</description>
+    </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/metainfo.xml b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/metainfo.xml
new file mode 100644
index 0000000..15ede51
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/metainfo.xml
@@ -0,0 +1,68 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+    <schemaVersion>2.0</schemaVersion>
+    <services>
+        <service>
+            <name>MAHOUT</name>
+            <displayName>Mahout</displayName>
+            <comment>Project of the Apache Software Foundation to produce free implementations of distributed or
+                otherwise scalable machine learning algorithms focused primarily in the areas of collaborative
+                filtering, clustering and classification</comment>
+            <version>1.0.0.2.3</version>
+            <components>
+                <component>
+                    <name>MAHOUT</name>
+                    <displayName>Mahout</displayName>
+                    <category>CLIENT</category>
+                    <cardinality>0+</cardinality>
+                    <versionAdvertised>true</versionAdvertised>
+                    <commandScript>
+                        <script>scripts/mahout_client.py</script>
+                        <scriptType>PYTHON</scriptType>
+                        <timeout>1200</timeout>
+                    </commandScript>
+                </component>
+            </components>
+            <osSpecifics>
+                <osSpecific>
+                    <osFamily>any</osFamily>
+                    <packages>
+                        <package>
+                            <name>mahout</name>
+                        </package>
+                    </packages>
+                </osSpecific>
+            </osSpecifics>
+
+            <commandScript>
+                <script>scripts/service_check.py</script>
+                <scriptType>PYTHON</scriptType>
+                <timeout>300</timeout>
+            </commandScript>
+
+            <requiredServices>
+                <service>YARN</service>
+            </requiredServices>
+
+            <configuration-dependencies>
+            </configuration-dependencies>
+
+        </service>
+    </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py
new file mode 100644
index 0000000..ba10035
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout.py
@@ -0,0 +1,34 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import os
+
+from resource_management import *
+
+def mahout():
+  import params
+
+  Directory( params.mahout_conf_dir,
+             recursive = True,
+             owner = params.mahout_user,
+             group = params.user_group
+  )
+
+  # TODO add configs creation
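
The TODO above is left open in this commit. Purely as a hypothetical sketch of what the config creation could look like, using the XmlConfig resource that the wildcard import above pulls in from resource_management (the 'mahout-site' section named here is an assumption and does not exist in this change):

def mahout_configs():
  import params

  # Hypothetical: render a *-site style file from an assumed 'mahout-site'
  # configuration section; this commit defines no such section.
  XmlConfig("mahout-site.xml",
            conf_dir = params.mahout_conf_dir,
            configurations = params.config['configurations']['mahout-site'],
            owner = params.mahout_user,
            group = params.user_group
  )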

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
new file mode 100644
index 0000000..f304e02
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
@@ -0,0 +1,52 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import sys
+from resource_management import *
+from mahout import mahout
+
+
+class MahoutClient(Script):
+
+  def get_stack_to_component(self):
+    return {"HDP": "mahout"}
+
+  def pre_rolling_restart(self, env):
+    import params
+    env.set_params(params)
+
+    Execute(('hdp-select', 'set', 'mahout', params.version),
+            sudo = True)
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    mahout()
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+if __name__ == "__main__":
+  MahoutClient().execute()
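
Note that pre_rolling_restart above passes a tuple to Execute, which resource_management runs without shell interpretation (argument-list style), with sudo=True prepending sudo. A rough standalone analogue using only the standard library (the version string is a made-up example):

import subprocess

version = "2.3.0.0-1234"  # hypothetical build number, for illustration only
# Comparable in spirit to Execute(('hdp-select', 'set', 'mahout', version), sudo=True)
subprocess.check_call(["sudo", "hdp-select", "set", "mahout", version])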

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
new file mode 100644
index 0000000..8efea99
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -0,0 +1,71 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management import *
+
+# server configurations
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+stack_name = default("/hostLevelParams/stack_name", None)
+
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+
+#mahout params
+mahout_conf_dir = "/etc/mahout/conf"
+mahout_home = "/usr/hdp/current/mahout-client"
+mahout_user = config['configurations']['mahout-env']['mahout_user']
+
+#hadoop params
+hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+hadoop_home = '/usr/hdp/current/hadoop-client'
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+user_group = config['configurations']['cluster-env']['user_group']
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
+kinit_path_local = functions.get_kinit_path()
+
+# not supporting 32 bit jdk.
+java64_home = config['hostLevelParams']['java_home']
+
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
+)
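
The functools.partial call above pre-binds the cluster-wide arguments (conf dir, hdfs user, keytab, kinit path, bin dir) so that call sites only supply the directory being created. A minimal self-contained illustration of the same pattern:

import functools

def create_hdfs_dir(path, conf_dir, hdfs_user):
  # Stand-in for the real HdfsDirectory resource.
  print("hadoop --config %s fs -mkdir -p %s (as %s)" % (conf_dir, path, hdfs_user))

# Bind the common arguments once...
CreateHdfsDir = functools.partial(create_hdfs_dir,
                                  conf_dir = "/etc/hadoop/conf",
                                  hdfs_user = "hdfs")

# ...so each call names only the directory, as with params.HdfsDirectory.
CreateHdfsDir("/user/mahout")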

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
new file mode 100644
index 0000000..e01cf47
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
@@ -0,0 +1,93 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+
+class MahoutServiceCheck(Script):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+
+    create_input_dir_cmd = format("fs -mkdir /user/{smokeuser}/mahoutsmokeinput")
+    copy_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
+    mahout_command = format("mahout seqdirectory --input /user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt "
+                            "--output /user/{smokeuser}/mahoutsmokeoutput/ --charset utf-8")
+    test_command = format("fs -test -e /user/{smokeuser}/mahoutsmokeoutput/_SUCCESS")
+    remove_output_input_dirs_cmd = format("fs -rm -r -f /user/{smokeuser}/mahoutsmokeoutput "
+                                          "/user/{smokeuser}/mahoutsmokeinput")
+
+    ExecuteHadoop( remove_output_input_dirs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   # for kinit run
+                   keytab = params.smoke_user_keytab,
+                   principal = params.smokeuser_principal,
+                   security_enabled = params.security_enabled,
+                   kinit_path_local = params.kinit_path_local,
+                   bin_dir = params.hadoop_bin_dir
+                   )
+
+    ExecuteHadoop( create_input_dir_cmd,
+                 tries = 3,
+                 try_sleep = 5,
+                 user = params.smokeuser,
+                 conf_dir = params.hadoop_conf_dir,
+                 bin_dir = params.hadoop_bin_dir
+    )
+
+    File( format("{tmp_dir}/sample-mahout-test.txt"),
+        content = "Test text which will be converted to sequence file.",
+        mode = 0755
+    )
+
+    ExecuteHadoop( copy_file_to_hdfs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+    Execute( mahout_command,
+             tries = 3,
+             try_sleep = 5,
+             environment={'HADOOP_HOME': params.hadoop_home,'HADOOP_CONF_DIR': params.hadoop_conf_dir,
+                          'MAHOUT_HOME': params.mahout_home,'JAVA_HOME': params.java64_home},
+             path = format('/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
+             user = params.smokeuser
+    )
+
+    ExecuteHadoop( test_command,
+                   tries = 10,
+                   try_sleep = 6,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+
+if __name__ == "__main__":
+  MahoutServiceCheck().execute()
+
+
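
The format() calls above resolve {smokeuser} and {tmp_dir} from the surrounding scope at call time. With the values in the test fixture below (smokeuser=ambari-qa, tmp_dir=/tmp), the templates render to exactly the commands asserted in test_mahout_service_check.py; plain str.format shows the same substitution:

smokeuser = "ambari-qa"  # cluster-env/smokeuser in the test config
tmp_dir = "/tmp"

create_input_dir_cmd = "fs -mkdir /user/{smokeuser}/mahoutsmokeinput".format(smokeuser=smokeuser)
copy_file_to_hdfs_cmd = ("fs -put {tmp_dir}/sample-mahout-test.txt "
                         "/user/{smokeuser}/mahoutsmokeinput/").format(tmp_dir=tmp_dir, smokeuser=smokeuser)

print(create_input_dir_cmd)   # fs -mkdir /user/ambari-qa/mahoutsmokeinput
print(copy_file_to_hdfs_cmd)  # fs -put /tmp/sample-mahout-test.txt /user/ambari-qa/mahoutsmokeinput/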

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
new file mode 100644
index 0000000..ead3dd4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/role_command_order.json
@@ -0,0 +1,8 @@
+{
+  "_comment" : "Record format:",
+  "_comment" : "blockedRole-blockedCommand: [blockerRole1-blockerCommand1, blockerRole2-blockerCommand2, ...]",
+  "general_deps" : {
+    "_comment" : "dependencies for all cases",
+    "MAHOUT_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"]
+  }
+}
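
Each record here blocks the key's role-command until every listed blocker command has completed. A short sketch of reading the new entry (the relative file path is assumed for illustration):

import json

with open("role_command_order.json") as f:
  rco = json.load(f)

# The Mahout service check may only start once both YARN daemons are up:
blockers = rco["general_deps"]["MAHOUT_SERVICE_CHECK-SERVICE_CHECK"]
print(blockers)  # ['NODEMANAGER-START', 'RESOURCEMANAGER-START']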

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/main/resources/stacks/HDP/2.3/services/MAHOUT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/MAHOUT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/MAHOUT/metainfo.xml
new file mode 100644
index 0000000..6c4ef96
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/MAHOUT/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+    <schemaVersion>2.0</schemaVersion>
+    <services>
+        <service>
+            <name>MAHOUT</name>
+            <extends>common-services/MAHOUT/1.0.0.2.3</extends>
+        </service>
+    </services>
+</metainfo>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
new file mode 100644
index 0000000..b74c4e4
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+import json
+
+class TestMahoutClient(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "MAHOUT/1.0.0.2.3/package"
+  STACK_VERSION = "2.3"
+
+  def test_configure_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mahout_client.py",
+                       classname = "MahoutClient",
+                       command = "configure",
+                       config_file="default.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    self.assertResourceCalled('Directory', '/etc/mahout/conf',
+                              owner = 'mahout',
+                              group = 'hadoop',
+                              recursive = True,
+                              )
+    self.assertNoMoreResources()
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
new file mode 100644
index 0000000..5e09ad2
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+
+class TestMahoutClient(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "MAHOUT/1.0.0.2.3/package"
+  STACK_VERSION = "2.3"
+
+  def test_configure_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
+                       classname = "MahoutServiceCheck",
+                       command = "service_check",
+                       config_file="default.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mahoutsmokeoutput /user/ambari-qa/mahoutsmokeinput',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              try_sleep = 5,
+                              kinit_path_local = '/usr/bin/kinit',
+                              tries = 3,
+                              user = 'ambari-qa',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              principal = UnknownConfigurationMock(),
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /user/ambari-qa/mahoutsmokeinput',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
+    self.assertResourceCalled('File', '/tmp/sample-mahout-test.txt',
+                              content = 'Test text which will be converted to sequence file.',
+                              mode = 0755,
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -put /tmp/sample-mahout-test.txt /user/ambari-qa/mahoutsmokeinput/',
+                              try_sleep = 5,
+                              tries = 3,
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
+    self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'
+                                         'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ '
+                                         '--charset utf-8',
+                              environment = {'HADOOP_CONF_DIR': '/etc/hadoop/conf',
+                                             'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+                                             'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45',
+                                             'MAHOUT_HOME': '/usr/hdp/current/mahout-client'},
+                              path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries = 3,
+                              user = 'ambari-qa',
+                              try_sleep = 5,
+                              )
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/mahoutsmokeoutput/_SUCCESS',
+                              try_sleep = 6,
+                              tries = 10,
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              user = 'ambari-qa',
+                              conf_dir = '/etc/hadoop/conf',
+                              )
+    self.assertNoMoreResources()
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/ea8e32ff/ambari-server/src/test/python/stacks/2.3/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/configs/default.json b/ambari-server/src/test/python/stacks/2.3/configs/default.json
new file mode 100644
index 0000000..37c69d5
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/configs/default.json
@@ -0,0 +1,261 @@
+{
+    "roleCommand": "SERVICE_CHECK",
+    "clusterName": "c1",
+    "hostname": "c6401.ambari.apache.org",
+    "hostLevelParams": {
+        "jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+        "ambari_db_rca_password": "mapred",
+        "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+        "jce_name": "UnlimitedJCEPolicyJDK7.zip",
+        "stack_version": "2.3",
+        "stack_name": "HDP",
+        "ambari_db_rca_driver": "org.postgresql.Driver",
+        "jdk_name": "jdk-7u67-linux-x64.tar.gz",
+        "ambari_db_rca_username": "mapred",
+        "java_home": "/usr/jdk64/jdk1.7.0_45",
+        "db_name": "ambari"
+    },
+    "commandType": "EXECUTION_COMMAND",
+    "roleParams": {},
+    "serviceName": "SLIDER",
+    "role": "SLIDER",
+    "commandParams": {
+        "version": "2.2.1.0-2067",
+        "command_timeout": "300",
+        "service_package_folder": "OOZIE",
+        "script_type": "PYTHON",
+        "script": "scripts/service_check.py",
+        "excluded_hosts": "host1,host2"
+    },
+    "taskId": 152,
+    "public_hostname": "c6401.ambari.apache.org",
+    "configurations": {
+        "slider-client": {
+            "slider.yarn.queue": "default"
+        },
+        "mahout-env": {
+             "mahout_user": "mahout"
+        },
+        "hadoop-env": {
+             "hdfs_user": "hdfs"
+        },
+        "core-site": {
+            "fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"
+        },
+        "hdfs-site": {
+            "a": "b"
+        },
+        "yarn-site": {
+            "yarn.application.classpath": "/etc/hadoop/conf,/usr/lib/hadoop/*,/usr/lib/hadoop/lib/*,/usr/lib/hadoop-hdfs/*,/usr/lib/hadoop-hdfs/lib/*,/usr/lib/hadoop-yarn/*,/usr/lib/hadoop-yarn/lib/*,/usr/lib/hadoop-mapreduce/*,/usr/lib/hadoop-mapreduce/lib/*",
+            "yarn.resourcemanager.address": "c6401.ambari.apache.org:8050",
+            "yarn.resourcemanager.scheduler.address": "c6401.ambari.apache.org:8030"
+        },
+        "cluster-env": {
+            "security_enabled": "false",
+            "ignore_groupsusers_create": "false",
+            "smokeuser": "ambari-qa",
+            "kerberos_domain": "EXAMPLE.COM",
+            "user_group": "hadoop"
+        },
+        "webhcat-site": {
+            "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",
+            "templeton.pig.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/pig.tar.gz",
+            "templeton.hive.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/hive.tar.gz",
+            "templeton.sqoop.archive": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/sqoop.tar.gz",
+            "templeton.streaming.jar": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mr/hadoop-streaming.jar"
+        },
+        "slider-log4j": {
+            "content": "log4jproperties\nline2"
+        },
+        "slider-env": {
+            "content": "envproperties\nline2"
+        },
+      "gateway-site": {
+        "java.security.auth.login.config": "/etc/knox/conf/krb5JAASLogin.conf",
+        "gateway.hadoop.kerberos.secured": "false",
+        "gateway.gateway.conf.dir": "deployments",
+        "gateway.path": "gateway",
+        "sun.security.krb5.debug": "true",
+        "java.security.krb5.conf": "/etc/knox/conf/krb5.conf",
+        "gateway.port": "8443"
+      },
+
+      "users-ldif": {
+        "content": "\n            # Licensed to the Apache Software Foundation (ASF) under one\n            # or more contributor license agreements.  See the NOTICE file\n            # distributed with this work for additional information\n            # regarding copyright ownership.  The ASF licenses this file\n            # to you under the Apache License, Version 2.0 (the\n            # \"License\"); you may not use this file except in compliance\n            # with the License.  You may obtain a copy of the License at\n            #\n            #     http://www.apache.org/licenses/LICENSE-2.0\n            #\n            # Unless required by applicable law or agreed to in writing, software\n            # distributed under the License is distributed on an \"AS IS\" BASIS,\n            # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n            # See the License for the specific language governing permissions and\n            # limitations under the Li
 cense.\n\n            version: 1\n\n            # Please replace with site specific values\n            dn: dc=hadoop,dc=apache,dc=org\n            objectclass: organization\n            objectclass: dcObject\n            o: Hadoop\n            dc: hadoop\n\n            # Entry for a sample people container\n            # Please replace with site specific values\n            dn: ou=people,dc=hadoop,dc=apache,dc=org\n            objectclass:top\n            objectclass:organizationalUnit\n            ou: people\n\n            # Entry for a sample end user\n            # Please replace with site specific values\n            dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org\n            objectclass:top\n            objectclass:person\n            objectclass:organizationalPerson\n            objectclass:inetOrgPerson\n            cn: Guest\n            sn: User\n            uid: guest\n            userPassword:guest-password\n\n            # entry for sample user admin\n            dn
 : uid=admin,ou=people,dc=hadoop,dc=apache,dc=org\n            objectclass:top\n            objectclass:person\n            objectclass:organizationalPerson\n            objectclass:inetOrgPerson\n            cn: Admin\n            sn: Admin\n            uid: admin\n            userPassword:admin-password\n\n            # entry for sample user sam\n            dn: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org\n            objectclass:top\n            objectclass:person\n            objectclass:organizationalPerson\n            objectclass:inetOrgPerson\n            cn: sam\n            sn: sam\n            uid: sam\n            userPassword:sam-password\n\n            # entry for sample user tom\n            dn: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org\n            objectclass:top\n            objectclass:person\n            objectclass:organizationalPerson\n            objectclass:inetOrgPerson\n            cn: tom\n            sn: tom\n            uid: tom\n            userPassw
 ord:tom-password\n\n            # create FIRST Level groups branch\n            dn: ou=groups,dc=hadoop,dc=apache,dc=org\n            objectclass:top\n            objectclass:organizationalUnit\n            ou: groups\n            description: generic groups branch\n\n            # create the analyst group under groups\n            dn: cn=analyst,ou=groups,dc=hadoop,dc=apache,dc=org\n            objectclass:top\n            objectclass: groupofnames\n            cn: analyst\n            description:analyst  group\n            member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org\n            member: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org\n\n\n            # create the scientist group under groups\n            dn: cn=scientist,ou=groups,dc=hadoop,dc=apache,dc=org\n            objectclass:top\n            objectclass: groupofnames\n            cn: scientist\n            description: scientist group\n            member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org"
+      },
+
+      "topology": {
+        "content": "\n        <topology>\n\n            <gateway>\n\n                <provider>\n                    <role>authentication</role>\n                    <name>ShiroProvider</name>\n                    <enabled>true</enabled>\n                    <param>\n                        <name>sessionTimeout</name>\n                        <value>30</value>\n                    </param>\n                    <param>\n                        <name>main.ldapRealm</name>\n                        <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n                    </param>\n                    <param>\n                        <name>main.ldapRealm.userDnTemplate</name>\n                        <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n                    </param>\n                    <param>\n                        <name>main.ldapRealm.contextFactory.url</name>\n                        <value>ldap://{{knox_host_name}}:33389</value>\n               
      </param>\n                    <param>\n                        <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n                        <value>simple</value>\n                    </param>\n                    <param>\n                        <name>urls./**</name>\n                        <value>authcBasic</value>\n                    </param>\n                </provider>\n\n                <provider>\n                    <role>identity-assertion</role>\n                    <name>Default</name>\n                    <enabled>true</enabled>\n                </provider>\n\n            </gateway>\n\n            <service>\n                <role>NAMENODE</role>\n                <url>hdfs://{{namenode_host}}:{{namenode_rpc_port}}</url>\n            </service>\n\n            <service>\n                <role>JOBTRACKER</role>\n                <url>rpc://{{rm_host}}:{{jt_rpc_port}}</url>\n            </service>\n\n            <service>\n                <role>WEBHDFS</ro
 le>\n                <url>http://{{namenode_host}}:{{namenode_http_port}}/webhdfs</url>\n            </service>\n\n            <service>\n                <role>WEBHCAT</role>\n                <url>http://{{webhcat_server_host}}:{{templeton_port}}/templeton</url>\n            </service>\n\n            <service>\n                <role>OOZIE</role>\n                <url>http://{{oozie_server_host}}:{{oozie_server_port}}/oozie</url>\n            </service>\n\n            <service>\n                <role>WEBHBASE</role>\n                <url>http://{{hbase_master_host}}:{{hbase_master_port}}</url>\n            </service>\n\n            <service>\n                <role>HIVE</role>\n                <url>http://{{hive_server_host}}:{{hive_http_port}}/{{hive_http_path}}</url>\n            </service>\n\n            <service>\n                <role>RESOURCEMANAGER</role>\n                <url>http://{{rm_host}}:{{rm_port}}/ws</url>\n            </service>\n        </topology>"
+      },
+
+      "ldap-log4j": {
+        "content": "\n        # Licensed to the Apache Software Foundation (ASF) under one\n        # or more contributor license agreements.  See the NOTICE file\n        # distributed with this work for additional information\n        # regarding copyright ownership.  The ASF licenses this file\n        # to you under the Apache License, Version 2.0 (the\n        # \"License\"); you may not use this file except in compliance\n        # with the License.  You may obtain a copy of the License at\n        #\n        #     http://www.apache.org/licenses/LICENSE-2.0\n        #\n        # Unless required by applicable law or agreed to in writing, software\n        # distributed under the License is distributed on an \"AS IS\" BASIS,\n        # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n        # See the License for the specific language governing permissions and\n        # limitations under the License.\n        #testing\n\n        app.log.dir=${launcher.d
 ir}/../logs\n        app.log.file=${launcher.name}.log\n\n        log4j.rootLogger=ERROR, drfa\n        log4j.logger.org.apache.directory.server.ldap.LdapServer=INFO\n        log4j.logger.org.apache.directory=WARN\n\n        log4j.appender.stdout=org.apache.log4j.ConsoleAppender\n        log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\n        log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n        log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender\n        log4j.appender.drfa.File=${app.log.dir}/${app.log.file}\n        log4j.appender.drfa.DatePattern=.yyyy-MM-dd\n        log4j.appender.drfa.layout=org.apache.log4j.PatternLayout\n        log4j.appender.drfa.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n"
+      },
+
+      "gateway-log4j": {
+        "content": "\n\n      # Licensed to the Apache Software Foundation (ASF) under one\n      # or more contributor license agreements. See the NOTICE file\n      # distributed with this work for additional information\n      # regarding copyright ownership. The ASF licenses this file\n      # to you under the Apache License, Version 2.0 (the\n      # \"License\"); you may not use this file except in compliance\n      # with the License. You may obtain a copy of the License at\n      #\n      # http://www.apache.org/licenses/LICENSE-2.0\n      #\n      # Unless required by applicable law or agreed to in writing, software\n      # distributed under the License is distributed on an \"AS IS\" BASIS,\n      # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n      # See the License for the specific language governing permissions and\n      # limitations under the License.\n\n      app.log.dir=${launcher.dir}/../logs\n      app.log.file=${launcher.name}.log\n 
      app.audit.file=${launcher.name}-audit.log\n\n      log4j.rootLogger=ERROR, drfa\n\n      log4j.logger.org.apache.hadoop.gateway=INFO\n      #log4j.logger.org.apache.hadoop.gateway=DEBUG\n\n      #log4j.logger.org.eclipse.jetty=DEBUG\n      #log4j.logger.org.apache.shiro=DEBUG\n      #log4j.logger.org.apache.http=DEBUG\n      #log4j.logger.org.apache.http.client=DEBUG\n      #log4j.logger.org.apache.http.headers=DEBUG\n      #log4j.logger.org.apache.http.wire=DEBUG\n\n      log4j.appender.stdout=org.apache.log4j.ConsoleAppender\n      log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\n      log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n      log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender\n      log4j.appender.drfa.File=${app.log.dir}/${app.log.file}\n      log4j.appender.drfa.DatePattern=.yyyy-MM-dd\n      log4j.appender.drfa.layout=org.apache.log4j.PatternLayout\n      log4j.appender.drfa.layout.ConversionPattern
 =%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\n\n      log4j.logger.audit=INFO, auditfile\n      log4j.appender.auditfile=org.apache.log4j.DailyRollingFileAppender\n      log4j.appender.auditfile.File=${app.log.dir}/${app.audit.file}\n      log4j.appender.auditfile.Append = true\n      log4j.appender.auditfile.DatePattern = '.'yyyy-MM-dd\n      log4j.appender.auditfile.layout = org.apache.hadoop.gateway.audit.log4j.layout.AuditLayout"
+      },
+      "knox-env": {
+        "knox_master_secret": "sa",
+        "knox_group": "knox",
+        "knox_pid_dir": "/var/run/knox",
+        "knox_user": "knox"
+      },
+      "kafka-env": {
+        "content": "\n#!/bin/bash\n\n# Set KAFKA specific environment variables here.\n\n# The java implementation to use.\nexport JAVA_HOME={{java64_home}}\nexport PATH=$PATH:$JAVA_HOME/bin",
+        "kafka_user": "kafka",
+        "kafka_log_dir": "/var/log/kafka",
+        "kafka_pid_dir": "/var/run/kafka"
+      },
+      "kafka-log4j": {
+        "content": "\n#\n#\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements.  See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership.  The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License.  You may obtain a copy of the License at\n#\n#   http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied.  See the License for the\n# specific language governing permissions and limitations\n# under the License.\n#\n#\n#\nkafka.logs.dir=logs\n\nlog4j.rootLogger=INFO, stdout\n\nlog4j.appender.stdout=org.apache.log4j.ConsoleAppender\nlog4j.appender.stdout.layout=org.apache.log
 4j.PatternLayout\nlog4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log\nlog4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log\nlog4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-requ
 est.log\nlog4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log\nlog4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\nlog4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender\nlog4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH\nlog4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log\nlog4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout\nlog4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n\n\n# Turn on all our debugging info\n#log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG, kafkaAppender\n#log4j.log
 ger.kafka.client.ClientUtils=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf=DEBUG, kafkaAppender\n#log4j.logger.kafka.perf.ProducerPerformance$ProducerThread=DEBUG, kafkaAppender\n#log4j.logger.org.I0Itec.zkclient.ZkClient=DEBUG\nlog4j.logger.kafka=INFO, kafkaAppender\nlog4j.logger.kafka.network.RequestChannel$=WARN, requestAppender\nlog4j.additivity.kafka.network.RequestChannel$=false\n\n#log4j.logger.kafka.network.Processor=TRACE, requestAppender\n#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender\n#log4j.additivity.kafka.server.KafkaApis=false\nlog4j.logger.kafka.request.logger=WARN, requestAppender\nlog4j.additivity.kafka.request.logger=false\n\nlog4j.logger.kafka.controller=TRACE, controllerAppender\nlog4j.additivity.kafka.controller=false\n\nlog4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender\nlog4j.additivity.kafka.log.LogCleaner=false\n\nlog4j.logger.state.change.logger=TRACE, stateChangeAppender\nlog4j.additivity.state.change.logger=false"
+      },
+      "kafka-broker": {
+        "log.segment.bytes": "1073741824",
+        "socket.send.buffer.bytes": "102400",
+        "num.network.threads": "3",
+        "log.flush.scheduler.interval.ms": "3000",
+        "kafka.ganglia.metrics.host": "localhost",
+        "zookeeper.session.timeout.ms": "6000",
+        "replica.lag.time.max.ms": "10000",
+        "num.io.threads": "8",
+        "kafka.ganglia.metrics.group": "kafka",
+        "replica.lag.max.messages": "4000",
+        "port": "6667",
+        "log.retention.bytes": "-1",
+        "fetch.purgatory.purge.interval.requests": "10000",
+        "producer.purgatory.purge.interval.requests": "10000",
+        "default.replication.factor": "1",
+        "replica.high.watermark.checkpoint.interval.ms": "5000",
+        "zookeeper.connect": "c6402.ambari.apache.org:2181",
+        "controlled.shutdown.retry.backoff.ms": "5000",
+        "num.partitions": "1",
+        "log.flush.interval.messages": "10000",
+        "replica.fetch.min.bytes": "1",
+        "queued.max.requests": "500",
+        "controlled.shutdown.max.retries": "3",
+        "replica.fetch.wait.max.ms": "500",
+        "controlled.shutdown.enable": "false",
+        "log.roll.hours": "168",
+        "log.cleanup.interval.mins": "10",
+        "replica.socket.receive.buffer.bytes": "65536",
+        "zookeeper.connection.timeout.ms": "6000",
+        "replica.fetch.max.bytes": "1048576",
+        "num.replica.fetchers": "1",
+        "socket.request.max.bytes": "104857600",
+        "message.max.bytes": "1000000",
+        "zookeeper.sync.time.ms": "2000",
+        "socket.receive.buffer.bytes": "102400",
+        "controller.message.queue.size": "10",
+        "log.flush.interval.ms": "3000",
+        "log.dirs": "/tmp/log/dir",
+        "controller.socket.timeout.ms": "30000",
+        "replica.socket.timeout.ms": "30000",
+        "auto.create.topics.enable": "true",
+        "log.index.size.max.bytes": "10485760",
+        "kafka.ganglia.metrics.port": "8649",
+        "log.index.interval.bytes": "4096",
+        "log.retention.hours": "168"
+      },
+      "ranger-hbase-plugin-properties": {
+            "ranger-hbase-plugin-enabled":"yes"
+      },
+      "ranger-hive-plugin-properties": {
+            "ranger-hive-plugin-enabled":"yes"
+       }
+    },
+    "configuration_attributes": {
+        "yarn-site": {
+            "final": {
+                "yarn.nodemanager.disk-health-checker.min-healthy-disks": "true",
+                "yarn.nodemanager.container-executor.class": "true",
+                "yarn.nodemanager.local-dirs": "true"
+            }
+        },
+        "hdfs-site": {
+            "final": {
+                "dfs.web.ugi": "true",
+                "dfs.support.append": "true",
+                "dfs.cluster.administrators": "true"
+            }
+        },
+        "core-site": {
+            "final": {
+                "hadoop.proxyuser.hive.groups": "true",
+                "webinterface.private.actions": "true",
+                "hadoop.proxyuser.oozie.hosts": "true"
+            }
+        },
+      "knox-env": {},
+      "gateway-site": {},
+      "users-ldif": {},
+      "kafka-env": {},
+      "kafka-log4j": {},
+      "kafka-broker": {}
+    },
+    "configurationTags": {
+        "slider-client": {
+            "tag": "version1"
+        },
+        "slider-log4j": {
+            "tag": "version1"
+        },
+        "slider-env": {
+            "tag": "version1"
+        },
+        "core-site": {
+            "tag": "version1"
+        },
+        "hdfs-site": {
+            "tag": "version1"
+        },
+        "yarn-site": {
+            "tag": "version1"
+        },
+      "gateway-site": {
+        "tag": "version1"
+      },
+      "topology": {
+        "tag": "version1"
+      },
+      "users-ldif": {
+        "tag": "version1"
+      },
+      "kafka-env": {
+        "tag": "version1"
+      },
+      "kafka-log4j": {
+        "tag": "version1"
+      },
+      "kafka-broker": {
+        "tag": "version1"
+      }
+    },
+    "commandId": "7-1",
+    "clusterHostInfo": {
+        "ambari_server_host": [
+            "c6401.ambari.apache.org"
+        ],
+        "all_ping_ports": [
+            "8670",
+            "8670"
+        ],
+        "rm_host": [
+            "c6402.ambari.apache.org"
+        ],
+        "all_hosts": [
+            "c6401.ambari.apache.org",
+            "c6402.ambari.apache.org"
+        ],
+      "knox_gateway_hosts": [
+        "jaimin-knox-1.c.pramod-thangali.internal"
+      ],
+      "kafka_broker_hosts": [
+        "c6401.ambari.apache.org"
+      ],
+       "zookeeper_hosts": [
+         "c6401.ambari.apache.org"
+        ]
+
+}
+}