Posted to commits@ambari.apache.org by sc...@apache.org on 2014/12/11 19:53:05 UTC

[5/7] ambari git commit: AMBARI-8433 Enable HDP 2.2.GlusterFS stack

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/metrics.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/metrics.json b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/metrics.json
new file mode 100644
index 0000000..826c0e8
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/metrics.json
@@ -0,0 +1,262 @@
+{
+  "KAFKA_BROKER": {
+    "Component": [
+      {
+        "type": "ganglia",
+        "metrics": {
+          "metrics/jvm/uptime":{
+            "metric":"jvm.uptime",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/jvm/heap_usage":{
+            "metric":"jvm.heap_usage",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/jvm/non_heap_usage":{
+            "metric":"jvm.non_heap_usage",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/jvm/thread-states/runnable":{
+            "metric":"jvm.thread-states.runnable",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/jvm/thread-states/blocked":{
+            "metric":"jvm.thread-states.blocked",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/jvm/thread-states/timed_waiting":{
+            "metric":"jvm.thread-states.timed_waiting",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/jvm/thread-states/terminated":{
+            "metric":"jvm.thread-states.terminated",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/jvm/thread_count":{
+            "metric":"jvm.thread_count",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/jvm/daemon_thread_count":{
+            "metric":"jvm.daemon_thread_count",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsMessagesInPerSec/1MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsMessagesInPerSec.1MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsMessagesInPerSec/5MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsMessagesInPerSec.5MinuteRate",
+            "pointInTime": false,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsMessagesInPerSec/15MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsMessagesInPerSec.15MinuteRate",
+            "pointInTime": false,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsMessagesInPerSec/meanRate": {
+            "metric": "kafka.server.BrokerTopicMetrics/AllTopicsMessagesInPerSec/meanRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsMessagesInPerSec/count": {
+            "metric": "kafka.server.BrokerTopicMetrics/AllTopicsMessagesInPerSec.counte",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesInPerSec/1MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesInPerSec.1MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesInPerSec/5MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesInPerSec.5MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesInPerSec/15MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesInPerSec.15MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesInPerSec/meanRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesInPerSec.meanRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesInPerSec/count": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesInPerSec.count",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesOutPerSec/1MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesOutPerSec.1MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesOutPerSec/5MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesOutPerSec.5MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesOutPerSec/15MinuteRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesOutPerSec.15MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesOutPerSec/meanRate": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesOutPerSec.meanRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/server/BrokerTopicMetrics/AllTopicsBytesOutPerSec/count": {
+            "metric": "kafka.server.BrokerTopicMetrics.AllTopicsBytesOutPerSec.count",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/KafkaController/ActiveControllerCount": {
+            "metric": "kafka.controller.KafkaController.ActiveControllerCount",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/log/LogFlushStats/LogFlushRateAndTimeMs/meanRate": {
+            "metric": "kafka.log.LogFlushStats.LogFlushRateAndTimeMs.meanRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/log/LogFlushStats/LogFlushRateAndTimeMs/1MinuteRate": {
+            "metric": "kafka.log.LogFlushStats.LogFlushRateAndTimeMs.1MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/log/LogFlushStats/LogFlushRateAndTimeMs/5MinuteRate": {
+            "metric": "kafka.log.LogFlushStats.LogFlushRateAndTimeMs.1MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/log/LogFlushStats/LogFlushRateAndTimeMs/15MinuteRate": {
+            "metric": "kafka.log.LogFlushStats.LogFlushRateAndTimeMs.15MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/log/LogFlushStats/LogFlushRateAndTimeMs/count": {
+            "metric": "kafka.log.LogFlushStats.LogFlushRateAndTimeMs.count",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/LeaderElectionRateAndTimeMs/meanRate": {
+            "metric": "kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.meanRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/LeaderElectionRateAndTimeMs/1MinuteRate": {
+            "metric": "kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.1MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/LeaderElectionRateAndTimeMs/5MinuteRate": {
+            "metric": "kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.5MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/LeaderElectionRateAndTimeMs/15MinuteRate": {
+            "metric": "kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.15MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/LeaderElectionRateAndTimeMs/count": {
+            "metric": "kafka.controller.ControllerStats.LeaderElectionRateAndTimeMs.count",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/UncleanLeaderElectionsPerSec/1MinuteRate": {
+            "metric": "kafka.controller.ControllerStats.UncleanLeaderElectionsPerSec.1MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/UncleanLeaderElectionsPerSec/5MinuteRate": {
+            "metric": "kafka.controller.ControllerStats.UncleanLeaderElectionsPerSec.5MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/UncleanLeaderElectionsPerSec/15MinuteRate": {
+            "metric": "kafka.controller.ControllerStats.UncleanLeaderElectionsPerSec.15MinuteRate",
+            "pointInTime": true,
+            "temporal": true
+          },
+          "metrics/kafka/controller/ControllerStats/OfflinePartitionsCount": {
+              "metric": "kafka.controller.ControllerStats.OfflinePartitionsCount",
+              "pointInTime" :true,
+              "temporal": true
+          },
+          "metrics/kafka/server/ReplicaManager/PartitionCount": {
+              "metric": "kafka.server.ReplicaManager.PartitionCount",
+              "pointInTime" : true,
+              "temporal": true
+          },
+          "metrics/kafka/server/ReplicaManager/LeaderCount": {
+              "metric": "kafka.server.ReplicaManager.LeaderCount",
+              "pointInTime" : true,
+              "temporal": true
+          },
+          "metrics/kafka/server/ReplicaManager/UnderReplicatedPartitions": {
+              "metric": "kafka.server.ReplicaManager.UnderReplicatedPartitions",
+              "pointInTime" :true,
+              "temporal": true
+          },
+          "metrics/kafka/server/ReplicaManager/ISRShrinksPerSec": {
+              "metric": "kafka.server.ReplicaManager.ISRShrinksPerSec",
+              "pointInTime" : true,
+              "temporal": true
+          },
+          "metrics/kafka/server/ReplicaManager/ISRExpandsPerSec": {
+              "metric": "kafka.server.ReplicaManager.ISRExpandsPerSec",
+              "pointInTime" : true,
+              "temporal": true
+          },
+
+          "metrics/kafka/server/ReplicaFetcherManager/Replica-MaxLag": {
+              "metric": "kafka.server.ReplicaFetcherManager.Replica-MaxLag",
+              "pointInTime" : true,
+              "temporal": true
+          },
+          "metrics/kafka/server/ProducerRequestPurgatory/PurgatorySize": {
+              "metric": "kafka.server.ProducerRequestPurgatory.PurgatorySize",
+              "pointInTime" : true,
+              "temporal": true
+          },
+          "metrics/kafka/server/FetchRequestPurgatory/PurgatorySize": {
+              "metric": "kafka.server.FetchRequestPurgatory.PurgatorySize",
+              "pointInTime" : true,
+              "temporal": true
+          },
+          "metrics/kafka/cluster/Partition/$1-UnderReplicated":{
+            "metric":"kafka.cluster.Partition.(\\w+)-UnderReplicated",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/kafka/consumer/ConsumerFetcherManager/$1-MaxLag":{
+            "metric":"kafka.consumer.ConsumerFetcherManager.(\\w+)-MaxLag",
+            "pointInTime":true,
+            "temporal":true
+          },
+          "metrics/kafka/consumer/ConsumerFetcherManager/$1-MinFetch":{
+            "metric":"kafka.consumer.ConsumerFetcherManager.(\\w+)-MinFetch",
+            "pointInTime":true,
+            "temporal":true
+          }
+        }
+      }
+    ]
+  }
+}
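
As a quick cross-check of the mapping above, the small standalone script below (a sketch; the file-path argument and the rule for skipping parameterized "$1"/regex entries are assumptions) verifies that every plain metrics key mirrors its dotted Ganglia metric name, which catches slips such as a stray "/" or a truncated suffix:

    import json
    import sys

    def check_metrics(path):
        # Each non-parameterized key "metrics/a/b/c" should map to metric "a.b.c".
        with open(path) as f:
            doc = json.load(f)
        problems = []
        for component, sections in doc.items():
            for section in sections.get("Component", []):
                for key, spec in section.get("metrics", {}).items():
                    metric = spec.get("metric", "")
                    if "$" in key or "(" in metric:
                        continue  # skip regex/parameterized entries
                    expected = key.replace("metrics/", "", 1).replace("/", ".")
                    if metric != expected:
                        problems.append((key, metric))
        return problems

    if __name__ == "__main__":
        for key, metric in check_metrics(sys.argv[1]):
            print("mismatch: %s -> %s" % (key, metric))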

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/kafka.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/kafka.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/kafka.py
new file mode 100644
index 0000000..c0231a8
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/kafka.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from properties_config import properties_config
+import sys
+from copy import deepcopy
+
+def kafka():
+    import params
+
+    Directory([params.log_dir, params.pid_dir, params.conf_dir],
+              owner=params.kafka_user,
+              group=params.user_group,
+              recursive=True
+          )
+    brokerid = str(sorted(params.kafka_hosts).index(params.hostname))
+    kafka_server_config = mutable_config_dict(params.config['configurations']['kafka-broker'])
+    kafka_server_config['broker.id'] = brokerid
+    kafka_server_config['host.name'] = params.hostname
+    kafka_data_dir = kafka_server_config['log.dirs']
+    Directory(filter(None,kafka_data_dir.split(",")),
+              owner=params.kafka_user,
+              group=params.user_group,
+              recursive=True)
+
+    conf_dir = params.conf_dir
+    properties_config("server.properties",
+                      conf_dir=params.conf_dir,
+                      configurations=kafka_server_config,
+                      owner=params.kafka_user,
+                      group=params.user_group,
+                      brokerid=brokerid)
+
+    File(format("{conf_dir}/kafka-env.sh"),
+          owner=params.kafka_user,
+          content=InlineTemplate(params.kafka_env_sh_template)
+     )
+
+    if params.log4j_props is not None:
+        File(format("{conf_dir}/log4j.properties"),
+             mode=0644,
+             group=params.user_group,
+             owner=params.kafka_user,
+             content=params.log4j_props
+         )
+
+
+def mutable_config_dict(kafka_broker_config):
+    kafka_server_config = {}
+    for key, value in kafka_broker_config.iteritems():
+        kafka_server_config[key] = value
+    return kafka_server_config
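
The broker.id assignment above relies only on the sorted list of Kafka broker hosts, so each host derives a stable, unique id without any coordination as long as the broker set does not change. A minimal illustration (the hostnames are placeholders):

    kafka_hosts = ["c6403.ambari.apache.org", "c6401.ambari.apache.org", "c6402.ambari.apache.org"]
    hostname = "c6402.ambari.apache.org"

    # Same expression as in kafka(): position of this host in the sorted broker list.
    brokerid = str(sorted(kafka_hosts).index(hostname))
    print(brokerid)  # -> "1"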

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/kafka_broker.py
new file mode 100644
index 0000000..c79ebb9
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/kafka_broker.py
@@ -0,0 +1,63 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import sys
+
+from kafka import kafka
+
+class KafkaBroker(Script):
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    kafka()
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env)
+    daemon_cmd = format('source {params.conf_dir}/kafka-env.sh ; {params.kafka_bin} start')
+    no_op_test = format('ls {params.pid_file} >/dev/null 2>&1 && ps -p `cat {params.pid_file}` >/dev/null 2>&1')
+    Execute(daemon_cmd,
+            user=params.kafka_user,
+            not_if=no_op_test
+    )
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env)
+    daemon_cmd = format('source {params.conf_dir}/kafka-env.sh; {params.kafka_bin} stop')
+    Execute(daemon_cmd,
+            user=params.kafka_user,
+    )
+    Execute(format("rm -f {params.pid_file}"))
+
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    check_process_status(status_params.kafka_pid_file)
+
+if __name__ == "__main__":
+  KafkaBroker().execute()
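
start() only launches the daemon when the not_if guard fails, i.e. when the pid file is missing or names a dead process. A rough standalone equivalent of that guard, outside of resource_management (assumes a Linux /proc filesystem; the helper names are illustrative only):

    import os
    import subprocess

    def kafka_running(pid_file):
        """True if pid_file exists and points at a live process."""
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
        except (IOError, OSError, ValueError):
            return False
        return os.path.exists("/proc/%d" % pid)

    def start_broker(conf_dir, kafka_bin, pid_file):
        if not kafka_running(pid_file):
            subprocess.call("source %s/kafka-env.sh ; %s start" % (conf_dir, kafka_bin),
                            shell=True, executable="/bin/bash")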

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/params.py
new file mode 100644
index 0000000..83d6d73
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/params.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management import *
+import status_params
+
+# server configurations
+config = Script.get_config()
+
+hdp_stack_version = str(config['hostLevelParams']['stack_version'])
+hdp_stack_version = format_hdp_stack_version(hdp_stack_version)
+stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0
+
+if stack_is_hdp22_or_further:
+    kafka_home = '/usr/hdp/current/kafka-broker/'
+    kafka_bin = kafka_home+'bin/kafka'
+else:
+    kafka_home = '/usr/lib/kafka/'
+    kafka_bin = kafka_home+'bin/kafka'
+
+
+conf_dir = "/etc/kafka/conf"
+kafka_user = config['configurations']['kafka-env']['kafka_user']
+log_dir = config['configurations']['kafka-env']['kafka_log_dir']
+pid_dir = status_params.kafka_pid_dir
+pid_file = pid_dir+"/kafka.pid"
+hostname = config['hostname']
+user_group = config['configurations']['cluster-env']['user_group']
+java64_home = config['hostLevelParams']['java_home']
+kafka_env_sh_template = config['configurations']['kafka-env']['content']
+kafka_hosts = config['clusterHostInfo']['kafka_broker_hosts']
+kafka_hosts.sort()
+
+zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts']
+zookeeper_hosts.sort()
+
+if (('kafka-log4j' in config['configurations']) and ('content' in config['configurations']['kafka-log4j'])):
+    log4j_props = config['configurations']['kafka-log4j']['content']
+else:
+    log4j_props = None
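
The stack check above only needs a numeric comparison of dotted versions. A minimal stand-in for compare_versions, assuming plain dotted numeric versions such as "2.2" or "2.1.0" (the real format_hdp_stack_version/compare_versions helpers imported above cover more cases):

    def compare_versions(a, b):
        # Pad both versions to the same length and compare component-wise.
        pa = [int(x) for x in a.split(".")]
        pb = [int(x) for x in b.split(".")]
        size = max(len(pa), len(pb))
        pa += [0] * (size - len(pa))
        pb += [0] * (size - len(pb))
        return (pa > pb) - (pa < pb)   # -1, 0 or 1

    hdp_stack_version = "2.2"
    print(compare_versions(hdp_stack_version, "2.2") >= 0)  # -> True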

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/properties_config.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/properties_config.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/properties_config.py
new file mode 100644
index 0000000..56bab2c
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/properties_config.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import re
+from resource_management import *
+
+def properties_inline_template(configurations):
+  return source.InlineTemplate('''{% for key, value in configurations_dict.items() %}{{ key }}={{ value }}
+{% endfor %}''', configurations_dict=configurations)
+
+def properties_config(filename, configurations = None, conf_dir = None,
+                      mode = None, owner = None, group = None, brokerid = None):
+    config_content = properties_inline_template(configurations)
+    File (format("{conf_dir}/{filename}"), content = config_content, owner = owner,
+          group = group, mode = mode)
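
properties_config renders one key=value line per configuration entry and writes the result to {conf_dir}/{filename}. The template can be exercised on its own with jinja2 (the sample values below are placeholders):

    from jinja2 import Template

    template = Template(
        "{% for key, value in configurations_dict.items() %}{{ key }}={{ value }}\n{% endfor %}")

    configurations = {
        "broker.id": "0",
        "log.dirs": "/kafka-logs",
        "zookeeper.connect": "c6401.ambari.apache.org:2181",
    }
    # Prints one key=value line per entry (order may vary).
    print(template.render(configurations_dict=configurations))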

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/service_check.py
new file mode 100644
index 0000000..c2b4bc1
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/service_check.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+from __future__ import print_function
+from resource_management import *
+import sys, subprocess, os
+
+class ServiceCheck(Script):
+    def service_check(self, env):
+        import params
+        env.set_params(params)
+        kafka_config = self.read_kafka_config(params.conf_dir)
+        self.set_env(params.conf_dir)
+        create_topic_cmd_created_output = "Created topic \"ambari_kafka_service_check\"."
+        create_topic_cmd_exists_output = "Topic \"ambari_kafka_service_check\" already exists."
+        print("Running kafka create topic command", file=sys.stdout)
+        create_topic_cmd = [params.kafka_home+'/bin/kafka-topics.sh', '--zookeeper', kafka_config['zookeeper.connect'],
+                            '--create', '--topic', 'ambari_kafka_service_check', '--partitions', '1', '--replication-factor', '1']
+        print(" ".join(create_topic_cmd), file=sys.stdout)
+        create_topic_process = subprocess.Popen(create_topic_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        out, err = create_topic_process.communicate()
+        if out.find(create_topic_cmd_created_output) != -1:
+            print(out, file=sys.stdout)
+            sys.exit(0)
+        elif out.find(create_topic_cmd_exists_output) != -1:
+            print("Topic ambari_kafka_service_check exists", file=sys.stdout)
+            sys.exit(0)
+        else:
+            print(out, file=sys.stderr)
+            sys.exit(1)
+
+    def read_kafka_config(self,kafka_conf_dir):
+        conf_file = open(kafka_conf_dir+"/server.properties","r")
+        kafka_config = {}
+        for line in conf_file:
+            key,value = line.split("=")
+            kafka_config[key] = value.replace("\n","")
+        return kafka_config
+
+    def set_env(self, kafka_conf_dir):
+        command = ['bash', '-c', 'source '+kafka_conf_dir+'/kafka-env.sh && env']
+        proc = subprocess.Popen(command, stdout = subprocess.PIPE)
+        for line in proc.stdout:
+            (key, _, value) = line.partition("=")
+            os.environ[key] = value.replace("\n","")
+        proc.communicate()
+
+if __name__ == "__main__":
+    ServiceCheck().execute()
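
read_kafka_config above splits each line of server.properties on "=", so it assumes exactly one key=value pair per line. A slightly more defensive sketch that skips blank lines and comments and only splits on the first "=" (so values may themselves contain "="):

    def read_kafka_config(kafka_conf_dir):
        kafka_config = {}
        with open(kafka_conf_dir + "/server.properties") as conf_file:
            for line in conf_file:
                line = line.strip()
                if not line or line.startswith("#"):
                    continue  # ignore blanks and comments
                key, _, value = line.partition("=")
                kafka_config[key.strip()] = value.strip()
        return kafka_config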

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/status_params.py
new file mode 100644
index 0000000..fcb0816
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KAFKA/package/scripts/status_params.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+config = Script.get_config()
+
+kafka_pid_dir = config['configurations']['kafka-env']['kafka_pid_dir']
+kafka_pid_file = format("{kafka_pid_dir}/kafka.pid")

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/kadm5-acl.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/kadm5-acl.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/kadm5-acl.xml
new file mode 100644
index 0000000..293bcbf
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/kadm5-acl.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<configuration>
+  <property>
+    <name>conf_dir</name>
+    <description>The kadm5.acl configuration directory</description>
+    <value>/var/kerberos/krb5kdc</value>
+  </property>
+  <property>
+    <name>content</name>
+    <description>The jinja template for the kadm5.acl file</description>
+    <value>
+      */admin@{{realm}}	*
+
+      {# Append additional realm declarations below #}
+    </value>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/kdc-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/kdc-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/kdc-conf.xml
new file mode 100644
index 0000000..ac41317
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/kdc-conf.xml
@@ -0,0 +1,57 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <property>
+    <name>kdcdefaults_kdc_ports</name>
+    <value>88</value>
+  </property>
+  <property>
+    <name>kdcdefaults_kdc_tcp_ports</name>
+    <value>88</value>
+  </property>
+
+  <property>
+    <name>conf_dir</name>
+    <description>The kdc.conf configuration directory</description>
+    <value>/var/kerberos/krb5kdc</value>
+  </property>
+  <property>
+    <name>content</name>
+    <description>The jinja template for the kdc.conf file</description>
+    <value>
+      [kdcdefaults]
+        kdc_ports = {{kdcdefaults_kdc_ports}}
+        kdc_tcp_ports = {{kdcdefaults_kdc_tcp_ports}}
+
+      [realms]
+        {{realm}} = {
+          acl_file = {{kadm5_acl_path}}
+          dict_file = /usr/share/dict/words
+          admin_keytab = {{kadm5_acl_dir}}/kadm5.keytab
+          supported_enctypes = {{libdefaults_default_tgs_enctypes}}
+        }
+
+      {# Append additional realm declarations below #}
+    </value>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
new file mode 100644
index 0000000..8e6b9ba
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/configuration/krb5-conf.xml
@@ -0,0 +1,186 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+  <property>
+    <name>logging_default</name>
+    <value>FILE:/var/log/krb5libs.log</value>
+  </property>
+  <property>
+    <name>logging_kdc</name>
+    <value>FILE:/var/log/krb5kdc.log</value>
+  </property>
+  <property>
+    <name>logging_admin_server</name>
+    <value>FILE:/var/log/kadmind.log</value>
+  </property>
+
+  <property>
+    <name>libdefaults_dns_lookup_realm</name>
+    <value>false</value>
+  </property>
+  <property>
+    <name>libdefaults_dns_lookup_kdc</name>
+    <value>false</value>
+  </property>
+  <property>
+    <name>libdefaults_ticket_lifetime</name>
+    <value>24h</value>
+  </property>
+  <property>
+    <name>libdefaults_renew_lifetime</name>
+    <value>7d</value>
+  </property>
+  <property>
+    <name>libdefaults_forwardable</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>libdefaults_default_tgs_enctypes</name>
+    <description>
+      a space-delimited list of session key encryption types supported by the KDC or Active
+      Directory
+    </description>
+    <value>
+      aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5
+      camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc des-cbc-md5 des-cbc-md4
+    </value>
+  </property>
+  <property>
+    <name>libdefaults_default_tkt_enctypes</name>
+    <description>
+      a space-delimited list of session key encryption types supported by the KDC or Active
+      Directory
+    </description>
+    <value>
+      aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 arcfour-hmac-md5
+      camellia256-cts-cmac camellia128-cts-cmac des-cbc-crc des-cbc-md5 des-cbc-md4
+    </value>
+  </property>
+
+  <property require-input="true">
+    <name>realm</name>
+    <description>
+      The realm to use when creating service principals
+    </description>
+    <value/>
+  </property>
+  <property require-input="true">
+    <name>domains</name>
+    <description>
+      A comma-delimited list of domain names that the realm serves (optional)
+    </description>
+    <value/>
+  </property>
+  <property require-input="true">
+    <name>kdc_type</name>
+    <description>
+      The type of KDC being used. Either mit-kdc or active-directory
+    </description>
+    <value>mit-kdc</value>
+  </property>
+  <property require-input="true">
+    <name>kdc_host</name>
+    <description>
+      The IP address or FQDN of the KDC or Active Directory server; optionally, a port number may be
+      provided
+    </description>
+    <value/>
+  </property>
+  <property>
+    <name>admin_server_host</name>
+    <description>
+      The IP address or FQDN of the administrative Kerberos server; optionally, a port number may be
+      provided
+    </description>
+    <value/>
+  </property>
+  <property>
+    <name>test_principal</name>
+    <description>
+      The principal that may be used to test the Kerberos configuration (this will not be retained)
+    </description>
+    <value/>
+  </property>
+  <property>
+    <name>test_password</name>
+    <description>
+      The password for the administrative principal (either this value or the keytab value is
+      required to be set; neither is expected to be retained)
+    </description>
+    <value/>
+    <property-type>PASSWORD</property-type>
+  </property>
+  <property>
+    <name>test_keytab</name>
+    <description>
+      The base64-encoded keytab for the test principal (either this value or the password
+      value is required to be set; neither is expected to be retained)
+    </description>
+    <value>
+
+    </value>
+  </property>
+
+
+  <property>
+    <name>conf_dir</name>
+    <description>The krb5.conf configuration directory</description>
+    <value>/etc</value>
+  </property>
+  <property>
+    <name>content</name>
+    <description>The jinja template for the krb5.conf file</description>
+    <value>
+[libdefaults]
+  renew_lifetime = {{libdefaults_renew_lifetime}}
+  forwardable = {{libdefaults_forwardable}}
+  default_realm = {{realm|upper()}}
+  ticket_lifetime = {{libdefaults_ticket_lifetime}}
+  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}
+  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}
+
+{% if domains %}
+[domain_realm]
+{% for domain in domains.split(',') %}
+  {{domain}} = {{realm|upper()}}
+{% endfor %}
+{% endif %}
+
+[logging]
+  default = {{logging_default}}
+{#
+# The following options are unused unless a managed KDC is installed
+  admin_server = {{logging_admin_server}}
+  kdc = {{logging_kdc}}
+#}
+
+[realms]
+  {{realm}} = {
+    admin_server = {{admin_server_host|default(kdc_host, True)}}
+    kdc = {{kdc_host}}
+  }
+
+{# Append additional realm declarations below #}
+    </value>
+  </property>
+</configuration>
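
The krb5.conf template is plain Jinja, so the |upper() and |default() filters and the domains loop can be exercised directly with jinja2. A short render of an excerpt with placeholder values (realm and host names are examples only; an empty admin_server_host falls back to kdc_host via the default filter):

    from jinja2 import Template

    krb5_conf_excerpt = """\
    [libdefaults]
      default_realm = {{realm|upper()}}
    {% if domains %}
    [domain_realm]
    {% for domain in domains.split(',') %}
      {{domain}} = {{realm|upper()}}
    {% endfor %}
    {% endif %}
    [realms]
      {{realm}} = {
        admin_server = {{admin_server_host|default(kdc_host, True)}}
        kdc = {{kdc_host}}
      }
    """

    print(Template(krb5_conf_excerpt).render(
        realm="example.com",
        domains=".example.com,example.com",
        admin_server_host="",
        kdc_host="kdc.example.com"))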

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/metainfo.xml
new file mode 100644
index 0000000..8751892
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/metainfo.xml
@@ -0,0 +1,167 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>KERBEROS</name>
+      <displayName>Kerberos</displayName>
+      <comment>A computer network authentication protocol which works on
+        the basis of 'tickets' to allow nodes communicating over a
+        non-secure network to prove their identity to one another in a
+        secure manner.
+      </comment>
+      <version>1.10.3-10</version>
+
+      <components>
+        <component>
+          <name>KDC_SERVER</name>
+          <displayName>Kerberos KDC</displayName>
+          <category>MASTER</category>
+          <cardinality>0-1</cardinality>
+          <dependencies>
+            <dependency>
+              <name>KERBEROS/KERBEROS_CLIENT</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <commandScript>
+            <script>scripts/kerberos_server.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>900</timeout>
+          </commandScript>
+          <configFiles>
+            <configFile>
+              <type>env</type>
+              <fileName>krb5.conf</fileName>
+              <dictionaryName>krb5-conf</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>kdc.conf</fileName>
+              <dictionaryName>kdc-conf</dictionaryName>
+            </configFile>
+            <configFile>
+              <type>env</type>
+              <fileName>kadm5.acl</fileName>
+              <dictionaryName>kadm5-acl</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
+
+        <component>
+          <name>KERBEROS_CLIENT</name>
+          <displayName>Kerberos Client</displayName>
+          <category>CLIENT</category>
+          <cardinality>ALL</cardinality>
+          <auto-deploy>
+            <enabled>true</enabled>
+          </auto-deploy>
+          <commandScript>
+            <script>scripts/kerberos_client.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <customCommands>
+            <customCommand>
+              <name>SET_KEYTAB</name>
+              <commandScript>
+                <script>scripts/kerberos_client.py</script>
+                <scriptType>PYTHON</scriptType>
+                <timeout>1000</timeout>
+              </commandScript>
+            </customCommand>
+          </customCommands>
+          <configFiles>
+            <configFile>
+              <type>env</type>
+              <fileName>krb5.conf</fileName>
+              <dictionaryName>krb5-conf</dictionaryName>
+            </configFile>
+          </configFiles>
+        </component>
+      </components>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat5,redhat6</osFamily>
+          <packages>
+            <package>
+              <name>krb5-server</name>
+            </package>
+            <package>
+              <name>krb5-libs</name>
+            </package>
+            <package>
+              <name>krb5-workstation</name>
+            </package>
+          </packages>
+        </osSpecific>
+
+        <osSpecific>
+          <osFamily>ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>krb5-kdc</name>
+            </package>
+            <package>
+              <name>krb5-admin-server</name>
+            </package>
+            <package>
+              <name>krb5-user</name>
+            </package>
+            <package>
+              <name>krb5-config</name>
+            </package>
+          </packages>
+        </osSpecific>
+
+        <osSpecific>
+          <osFamily>suse11</osFamily>
+          <packages>
+            <package>
+              <name>krb5</name>
+            </package>
+            <package>
+              <name>krb5-client</name>
+            </package>
+            <package>
+              <name>krb5-server</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <configuration-dependencies>
+        <config-type>krb5-conf</config-type>
+        <config-type>kdc-conf</config-type>
+        <config-type>kadm5-acl</config-type>
+      </configuration-dependencies>
+      <restartRequiredAfterChange>true</restartRequiredAfterChange>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_client.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_client.py
new file mode 100644
index 0000000..a341e8d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_client.py
@@ -0,0 +1,40 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from kerberos_common import *
+
+class KerberosClient(KerberosScript):
+  def install(self, env):
+    self.install_packages(env, ['krb5-server', 'krb5-libs', 'krb5-auth-dialog', 'krb5', 'krb5-kdc', 'krb5-admin-server'])
+    self.configure(env)
+
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    self.write_krb5_conf()
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+  def set_keytab(self, env):
+    KerberosScript.write_keytab_file()
+
+if __name__ == "__main__":
+  KerberosClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_common.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_common.py
new file mode 100644
index 0000000..269658b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_common.py
@@ -0,0 +1,398 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import base64
+import os
+import string
+import subprocess
+import sys
+import tempfile
+
+from resource_management import *
+from utils import get_property_value
+
+class KerberosScript(Script):
+  KRB5_REALM_PROPERTIES = [
+    'kdc',
+    'admin_server',
+    'default_domain',
+    'master_kdc'
+  ]
+
+  KRB5_SECTION_NAMES = [
+    'libdefaults',
+    'logging',
+    'realms',
+    'domain_realm',
+    'capaths',
+    'ca_paths',
+    'appdefaults',
+    'plugins'
+  ]
+
+  @staticmethod
+  def create_random_password():
+    import random
+
+    chars = string.digits + string.ascii_letters
+    return ''.join(random.choice(chars) for x in range(13))
+
+  @staticmethod
+  def write_conf_section(output_file, section_name, section_data):
+    if section_name is not None:
+      output_file.write('[%s]\n' % section_name)
+
+      if section_data is not None:
+        for key, value in section_data.iteritems():
+          output_file.write(" %s = %s\n" % (key, value))
+
+
+  @staticmethod
+  def _write_conf_realm(output_file, realm_name, realm_data):
+    """ Writes out realm details
+
+    Example:
+
+     EXAMPLE.COM = {
+      kdc = kerberos.example.com
+      admin_server = kerberos.example.com
+     }
+
+    """
+    if realm_name is not None:
+      output_file.write(" %s = {\n" % realm_name)
+
+      if realm_data is not None:
+        for key, value in realm_data.iteritems():
+          if key in KerberosScript.KRB5_REALM_PROPERTIES:
+            output_file.write("  %s = %s\n" % (key, value))
+
+      output_file.write(" }\n")
+
+  @staticmethod
+  def write_conf_realms_section(output_file, section_name, realms_data):
+    if section_name is not None:
+      output_file.write('[%s]\n' % section_name)
+
+      if realms_data is not None:
+        for realm, realm_data in realms_data.iteritems():
+          KerberosScript._write_conf_realm(output_file, realm, realm_data)
+          output_file.write('\n')
+
+  @staticmethod
+  def write_krb5_conf():
+    import params
+
+    Directory(params.krb5_conf_dir,
+              owner='root',
+              recursive=True,
+              group='root',
+              mode=0755
+    )
+
+    if (params.krb5_conf_template is None) or not params.krb5_conf_template.strip():
+      content = Template('krb5_conf.j2')
+    else:
+      content = InlineTemplate(params.krb5_conf_template)
+
+    File(params.krb5_conf_path,
+         content=content,
+         owner='root',
+         group='root',
+         mode=0644
+    )
+
+  @staticmethod
+  def invoke_kadmin(query, admin_identity=None, default_realm=None):
+    """
+    Executes the kadmin or kadmin.local command (depending on whether admin_identity is set or not)
+    and returns the command result code and standard out data.
+
+    :param query: the kadmin query to execute
+    :param admin_identity: the identity for the administrative user (optional)
+    :param default_realm: the default realm to assume
+    :return: return_code, out
+    """
+    if (query is not None) and (len(query) > 0):
+      auth_principal = None
+      auth_keytab_file = None
+
+      if admin_identity is not None:
+        auth_principal = get_property_value(admin_identity, 'principal')
+
+      if auth_principal is None:
+        kadmin = 'kadmin.local'
+        credential = ''
+      else:
+        kadmin = 'kadmin -p "%s"' % auth_principal
+
+        auth_password = get_property_value(admin_identity, 'password')
+
+        if auth_password is None:
+          auth_keytab = get_property_value(admin_identity, 'keytab')
+
+          if auth_keytab is not None:
+            (fd, auth_keytab_file) = tempfile.mkstemp()
+            os.write(fd, base64.b64decode(auth_keytab))
+            os.close(fd)
+
+          credential = '-k -t %s' % auth_keytab_file
+        else:
+          credential = '-w "%s"' % auth_password
+
+      if (default_realm is not None) and (len(default_realm) > 0):
+        realm = '-r %s' % default_realm
+      else:
+        realm = ''
+
+      try:
+        command = '%s %s %s -q "%s"' % (kadmin, credential, realm, query.replace('"', '\\"'))
+        return shell.checked_call(command)
+      except:
+        raise
+      finally:
+        if auth_keytab_file is not None:
+          os.remove(auth_keytab_file)
+
+  @staticmethod
+  def create_keytab_file(principal, path, auth_identity=None):
+    success = False
+
+    if (principal is not None) and (len(principal) > 0):
+      if (auth_identity is None) or (len(auth_identity) == 0):
+        norandkey = '-norandkey'
+      else:
+        norandkey = ''
+
+      if (path is not None) and (len(path) > 0):
+        keytab_file = '-k %s' % path
+      else:
+        keytab_file = ''
+
+      try:
+        result_code, output = KerberosScript.invoke_kadmin(
+          'ktadd %s %s %s' % (keytab_file, norandkey, principal),
+          auth_identity)
+
+        success = (result_code == 0)
+      except:
+        raise Fail("Failed to create keytab for principal: %s (in %s)" % (principal, path))
+
+    return success
+
+  @staticmethod
+  def create_keytab(principal, auth_identity=None):
+    keytab = None
+
+    (fd, temp_path) = tempfile.mkstemp()
+    os.remove(temp_path)
+
+    try:
+      if KerberosScript.create_keytab_file(principal, temp_path, auth_identity):
+        with open(temp_path, 'r') as f:
+          keytab = base64.b64encode(f.read())
+    finally:
+      if os.path.isfile(temp_path):
+        os.remove(temp_path)
+
+    return keytab
+
+  @staticmethod
+  def principal_exists(identity, auth_identity=None):
+    exists = False
+
+    if identity is not None:
+      principal = get_property_value(identity, 'principal')
+
+      if (principal is not None) and (len(principal) > 0):
+        try:
+          result_code, output = KerberosScript.invoke_kadmin('getprinc %s' % principal,
+                                                             auth_identity)
+          exists = (output is not None) and (("Principal: %s" % principal) in output)
+        except:
+          raise Fail("Failed to determine if principal exists: %s" % principal)
+
+    return exists
+
+  @staticmethod
+  def change_principal_password(identity, auth_identity=None):
+    success = False
+
+    if identity is not None:
+      principal = get_property_value(identity, 'principal')
+
+      if (principal is not None) and (len(principal) > 0):
+        password = get_property_value(identity, 'password')
+
+        if password is None:
+          credentials = '-randkey'
+        else:
+          credentials = '-pw "%s"' % password
+
+        try:
+          result_code, output = KerberosScript.invoke_kadmin(
+            'change_password %s %s' % (credentials, principal),
+            auth_identity)
+
+          success = (result_code == 0)
+        except:
+          raise Fail("Failed to create principal: %s" % principal)
+
+    return success
+
+  @staticmethod
+  def create_principal(identity, auth_identity=None):
+    success = False
+
+    if identity is not None:
+      principal = get_property_value(identity, 'principal')
+
+      if (principal is not None) and (len(principal) > 0):
+        password = get_property_value(identity, 'password')
+
+        if password is None:
+          credentials = '-randkey'
+        else:
+          credentials = '-pw "%s"' % password
+
+        try:
+          result_code, out = KerberosScript.invoke_kadmin(
+            'addprinc %s %s' % (credentials, principal),
+            auth_identity)
+
+          success = (result_code == 0)
+        except:
+          raise Fail("Failed to create principal: %s" % principal)
+
+    return success
+
+  @staticmethod
+  def create_principals(identities, auth_identity=None):
+    if identities is not None:
+      for identity in identities:
+        KerberosScript.create_principal(identity, auth_identity)
+
+  @staticmethod
+  def create_or_update_administrator_identity():
+    import params
+
+    if params.realm is not None:
+      admin_identity = params.get_property_value(params.realm, 'admin_identity')
+
+      if KerberosScript.principal_exists(admin_identity):
+        KerberosScript.change_principal_password(admin_identity)
+      else:
+        KerberosScript.create_principal(admin_identity)
+
+  @staticmethod
+  def test_kinit(identity):
+    principal = get_property_value(identity, 'principal')
+
+    if principal is not None:
+      keytab_file = get_property_value(identity, 'keytab_file')
+      keytab = get_property_value(identity, 'keytab')
+      password = get_property_value(identity, 'password')
+
+      # If a test keytab file is available, simply use it
+      if (keytab_file is not None) and (os.path.isfile(keytab_file)):
+        command = 'kinit -k -t %s %s' % (keytab_file, principal)
+        shell.checked_call(command)
+        return shell.checked_call('kdestroy')
+
+      # If base64-encoded test keytab data is available, decode it, write it to a temporary
+      # file, use it, and then remove the temporary file
+      elif keytab is not None:
+        (fd, test_keytab_file) = tempfile.mkstemp()
+        os.write(fd, base64.b64decode(keytab))
+        os.close(fd)
+
+        try:
+          command = 'kinit -k -t %s %s' % (test_keytab_file, principal)
+          shell.checked_call(command)
+          return shell.checked_call('kdestroy')
+        except:
+          raise
+        finally:
+          if test_keytab_file is not None:
+            os.remove(test_keytab_file)
+
+      # If no keytab data is available and a password was supplied, simply use it.
+      elif password is not None:
+        process = subprocess.Popen(['kinit', principal], stdin=subprocess.PIPE)
+        stdout, stderr = process.communicate(password)
+        if process.returncode:
+          err_msg = Logger.get_protected_text("Execution of kinit returned %d. %s" % (process.returncode, stderr))
+          raise Fail(err_msg)
+        else:
+          return shell.checked_call('kdestroy')
+      else:
+        return 0, ''
+    else:
+      return 0, ''
+
+
+  @staticmethod
+  def write_keytab_file():
+    import params
+
+    if params.keytab_details is not None:
+      data = get_property_value(params.keytab_details, 'data')
+
+      if (data is not None) and (len(data) > 0):
+        file_path = get_property_value(params.keytab_details, 'file-path')
+
+        if (file_path is not None) and (len(file_path) > 0):
+          with open(file_path, 'w') as f:
+            f.write(base64.b64decode(data))
+
+          KerberosScript._set_file_access(file_path, params.keytab_details, params.default_group)
+
+
+  @staticmethod
+  def _set_file_access(file_path, access_details, default_group=None):
+    if (file_path is not None) and os.path.isfile(file_path) and (access_details is not None):
+      import stat
+      import pwd
+      import grp
+
+      owner = get_property_value(access_details, 'owner/name')
+      owner_access = get_property_value(access_details, 'owner/access', 'rw')
+      group = get_property_value(access_details, 'group/name', default_group)
+      group_access = get_property_value(access_details, 'group/access', '')
+
+      pwnam = pwd.getpwnam(owner) if (owner is not None) and (len(owner) > 0) else None
+      uid = pwnam.pw_uid if pwnam is not None else os.geteuid()
+
+      grnam = grp.getgrnam(group) if (group is not None) and (len(group) > 0) else None
+      gid = grnam.gr_gid if grnam is not None else os.getegid()
+
+      chmod = 0
+
+      if owner_access == 'r':
+        chmod |= stat.S_IREAD
+      else:
+        chmod |= stat.S_IREAD | stat.S_IWRITE
+
+      if group_access == 'rw':
+        chmod |= stat.S_IRGRP | stat.S_IWGRP
+      elif group_access == 'r':
+        chmod |= stat.S_IRGRP
+
+      os.chmod(file_path, chmod)
+      os.chown(file_path, uid, gid)
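
The block above rounds out KerberosScript's kadmin plumbing: invoke_kadmin assembles and runs the kadmin command line, create_principal/change_principal_password/create_keytab build on top of it, and test_kinit verifies the resulting credentials with kinit/kdestroy. A minimal usage sketch, not part of the commit, with hypothetical principal and password values (it assumes the Ambari agent environment that kerberos_common.py imports from resource_management):

# Hypothetical sketch -- illustrates the helper API only, not code from the patch.
from kerberos_common import KerberosScript

identity = {'principal': 'ambari-qa@EXAMPLE.COM', 'password': 'hypothetical-password'}

if not KerberosScript.principal_exists(identity):
    KerberosScript.create_principal(identity)

# Base64-encoded keytab contents, or None if ktadd failed.
keytab_b64 = KerberosScript.create_keytab(identity['principal'])

# kinit with the supplied credentials, then kdestroy.
KerberosScript.test_kinit(identity)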

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_server.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_server.py
new file mode 100644
index 0000000..3e15f50
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/kerberos_server.py
@@ -0,0 +1,144 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from kerberos_common import *
+
+class KerberosServer(KerberosScript):
+  @staticmethod
+  def write_kadm5_acl():
+    import params
+
+    Directory(params.kadm5_acl_dir,
+              owner='root',
+              recursive=True,
+              group='root',
+              mode=0700
+    )
+
+    if (params.kadm5_acl_template is None) or not params.kadm5_acl_template.strip():
+      content = Template('kadm5_acl.j2')
+    else:
+      content = InlineTemplate(params.kadm5_acl_template)
+
+    File(params.kadm5_acl_path,
+         content=content,
+         owner='root',
+         group='root',
+         mode=0600
+    )
+
+  @staticmethod
+  def write_kdc_conf():
+    import params
+
+    Directory(params.kdc_conf_dir,
+              owner='root',
+              recursive=True,
+              group='root',
+              mode=0700
+    )
+
+    if (params.kdc_conf_template is None) or not params.kdc_conf_template.strip():
+      content = Template('kdc_conf.j2')
+    else:
+      content = InlineTemplate(params.kdc_conf_template)
+
+    File(params.kdc_conf_path,
+         content=content,
+         owner='root',
+         group='root',
+         mode=0600
+    )
+
+  def install(self, env):
+    import params
+
+    self.install_packages(env)
+    self.configure(env)
+
+    # Create the Kerberos database (only on install, for now)
+    Execute(
+      "%s create -s -P '%s'" % (params.kdb5_util_path, KerberosScript.create_random_password()))
+
+    # Create or update the administrator account
+    KerberosScript.create_or_update_administrator_identity()
+
+
+  def start(self, env):
+    os_family = System.get_instance().os_family
+
+    # Attempt to reconfigure the service before starting
+    self.configure(env)
+
+    # Create or update the administrator account
+    KerberosScript.create_or_update_administrator_identity()
+
+    if os_family == "suse":
+      Execute('rckadmind start')
+      Execute('rckrb5kdc start')
+    elif os_family == 'ubuntu':
+      Execute('service krb5-kdc start')
+      Execute('service krb5-admin-server start')
+    else:
+      Execute('service krb5kdc start')
+      Execute('service kadmin start')
+
+  def stop(self, env):
+    os_family = System.get_instance().os_family
+
+    if os_family == "suse":
+      Execute('rckadmind stop')
+      Execute('rckrb5kdc stop')
+    elif os_family == 'ubuntu':
+      Execute('service krb5-kdc stop')
+      Execute('service krb5-admin-server stop')
+    else:
+      Execute('service krb5kdc stop')
+      Execute('service kadmin stop')
+
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    KerberosServer.write_krb5_conf()
+    KerberosServer.write_kdc_conf()
+    KerberosServer.write_kadm5_acl()
+
+  def status(self, env):
+    import params
+
+    if params.os_family == "suse":
+      try:
+        Execute('checkproc `which krb5kdc`')
+        Execute('checkproc `which kadmind`')
+      except Fail as ex:
+        raise ComponentIsNotRunning()
+
+    elif params.os_family == 'ubuntu':
+      check_process_status(params.kdamin_pid_path)
+      check_process_status(params.krb5kdc_pid_path)
+
+    else:
+      check_process_status(params.kdamin_pid_path)
+      check_process_status(params.krb5kdc_pid_path)
+
+
+if __name__ == "__main__":
+  KerberosServer().execute()
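
start(), stop() and status() above all branch on the detected OS family. Restated as a small sketch for reference (illustration only, not part of the commit):

# Commands Execute()'d per OS family by the branches above.
SERVICE_COMMANDS = {
    'suse':   ['rckadmind {0}', 'rckrb5kdc {0}'],
    'ubuntu': ['service krb5-kdc {0}', 'service krb5-admin-server {0}'],
    'other':  ['service krb5kdc {0}', 'service kadmin {0}'],
}

def commands_for(os_family, action):
    # Returns the shell commands start()/stop() above would run for this platform.
    key = os_family if os_family in SERVICE_COMMANDS else 'other'
    return [c.format(action) for c in SERVICE_COMMANDS[key]]

commands_for('ubuntu', 'start')  # ['service krb5-kdc start', 'service krb5-admin-server start']

status() follows the same split: checkproc on SUSE, check_process_status against the kadmind/krb5kdc pid files elsewhere.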

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py
new file mode 100644
index 0000000..cff6250
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/params.py
@@ -0,0 +1,211 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from utils import get_property_value, get_unstructured_data
+
+os_family = System.get_instance().os_family
+
+krb5_conf_dir = '/etc'
+krb5_conf_file = 'krb5.conf'
+krb5_conf_path = krb5_conf_dir + '/' + krb5_conf_file
+
+if os_family == 'suse':
+  kdc_conf_dir = '/var/lib/kerberos/krb5kdc'
+elif os_family == 'ubuntu':
+  kdc_conf_dir = '/etc/krb5kdc'
+else:
+  kdc_conf_dir = '/var/kerberos/krb5kdc'
+kdc_conf_file = 'kdc.conf'
+kdc_conf_path = kdc_conf_dir + '/' + kdc_conf_file
+
+kadm5_acl_dir = kdc_conf_dir  # Typically kadm5.acl and kdc.conf exist in the same directory
+kadm5_acl_file = 'kadm5.acl'
+kadm5_acl_path = kadm5_acl_dir + '/' + kadm5_acl_file
+
+config = Script.get_config()
+
+command_params = None
+configurations = None
+keytab_details = None
+default_group = None
+cluster_env = None
+kdc_server_host = None
+cluster_host_info = None
+
+kdb5_util_path = 'kdb5_util'
+
+kdamin_pid_path = '/var/run/kadmind.pid'
+krb5kdc_pid_path = '/var/run/krb5kdc.pid'
+
+smoke_test_principal = None
+smoke_test_keytab_file = None
+
+# If a test keytab file is available, simply use it
+
+
+if config is not None:
+  command_params = get_property_value(config, 'commandParams')
+  if command_params is not None:
+    keytab_details = get_unstructured_data(command_params, 'keytab')
+
+  configurations = get_property_value(config, 'configurations')
+  if configurations is not None:
+    cluster_env = get_property_value(configurations, 'cluster-env')
+
+    if cluster_env is not None:
+      smoke_test_principal = get_property_value(cluster_env, 'smokeuser')
+      smoke_test_keytab_file = get_property_value(cluster_env, 'smokeuser_keytab')
+
+      default_group = get_property_value(cluster_env, 'user_group')
+
+      if default_group is None:
+        default_group = get_property_value(cluster_env, 'user-group')
+
+  cluster_host_info = get_property_value(config, 'clusterHostInfo')
+  if cluster_host_info is not None:
+    kdc_server_hosts = get_property_value(cluster_host_info, 'kdc_server_hosts')
+
+    if (kdc_server_hosts is not None) and (len(kdc_server_hosts) > 0):
+      kdc_server_host = kdc_server_hosts[0]
+
+  # ################################################################################################
+  # Get krb5.conf template data
+  # ################################################################################################
+  logging_default = 'FILE:/var/log/krb5libs.log'
+  logging_kdc = 'FILE:/var/log/krb5kdc.log'
+  logging_admin_server = 'FILE:/var/log/kadmind.log'
+  libdefaults_dns_lookup_realm = 'false'
+  libdefaults_dns_lookup_kdc = 'false'
+  libdefaults_ticket_lifetime = '24h'
+  libdefaults_renew_lifetime = '7d'
+  libdefaults_forwardable = 'true'
+  libdefaults_default_tgs_enctypes = 'aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 ' \
+                                     'arcfour-hmac-md5 camellia256-cts-cmac camellia128-cts-cmac ' \
+                                     'des-cbc-crc des-cbc-md5 des-cbc-md4'
+  libdefaults_default_tkt_enctypes = 'aes256-cts-hmac-sha1-96 aes128-cts-hmac-sha1-96 des3-cbc-sha1 ' \
+                                     'arcfour-hmac-md5 camellia256-cts-cmac camellia128-cts-cmac ' \
+                                     'des-cbc-crc des-cbc-md5 des-cbc-md4'
+  realm = 'EXAMPLE.COM'
+  domains = ''
+  kdc_host = 'localhost'
+  admin_server_host = None
+  admin_principal = None
+  admin_password = None
+  admin_keytab = None
+  test_principal = None
+  test_password = None
+  test_keytab = None
+  test_keytab_file = None
+
+  krb5_conf_template = None
+
+  krb5_conf_data = get_property_value(configurations, 'krb5-conf')
+
+  if krb5_conf_data is not None:
+    logging_default = get_property_value(krb5_conf_data, 'logging_default', logging_default)
+    logging_kdc = get_property_value(krb5_conf_data, 'logging_kdc', logging_kdc)
+    logging_admin_server = get_property_value(krb5_conf_data, 'logging_admin_server',
+                                              logging_admin_server)
+    libdefaults_dns_lookup_realm = get_property_value(krb5_conf_data,
+                                                      'libdefaults_dns_lookup_realm',
+                                                      libdefaults_dns_lookup_realm)
+    libdefaults_dns_lookup_kdc = get_property_value(krb5_conf_data, 'libdefaults_dns_lookup_kdc',
+                                                    libdefaults_dns_lookup_kdc)
+    libdefaults_ticket_lifetime = get_property_value(krb5_conf_data, 'libdefaults_ticket_lifetime',
+                                                     libdefaults_ticket_lifetime)
+    libdefaults_renew_lifetime = get_property_value(krb5_conf_data, 'libdefaults_renew_lifetime',
+                                                    libdefaults_renew_lifetime)
+    libdefaults_forwardable = get_property_value(krb5_conf_data, 'libdefaults_forwardable',
+                                                 libdefaults_forwardable)
+    libdefaults_default_tgs_enctypes = get_property_value(krb5_conf_data,
+                                                          'libdefaults_default_tgs_enctypes',
+                                                          libdefaults_default_tgs_enctypes)
+    libdefaults_default_tkt_enctypes = get_property_value(krb5_conf_data,
+                                                          'libdefaults_default_tkt_enctypes',
+                                                          libdefaults_default_tkt_enctypes)
+    realm = get_property_value(krb5_conf_data, 'realm', realm)
+    domains = get_property_value(krb5_conf_data, 'domains', domains)
+    kdc_host = get_property_value(krb5_conf_data, 'kdc_host', kdc_host)
+    admin_server_host = get_property_value(krb5_conf_data, 'admin_server_host', admin_server_host)
+
+    admin_principal = get_property_value(krb5_conf_data, 'admin_principal', admin_principal)
+    admin_password = get_property_value(krb5_conf_data, 'admin_password', admin_password)
+    admin_keytab = get_property_value(krb5_conf_data, 'admin_keytab', admin_keytab)
+
+    # If the admin keytab is just white space, set it to None
+    if admin_keytab is not None:
+      admin_keytab = admin_keytab.strip()
+
+      if len(admin_keytab) == 0:
+        admin_keytab = None
+
+    test_principal = get_property_value(krb5_conf_data, 'test_principal', test_principal)
+    test_password = get_property_value(krb5_conf_data, 'test_password', test_password)
+    test_keytab = get_property_value(krb5_conf_data, 'test_keytab', test_keytab)
+    test_keytab_file = get_property_value(krb5_conf_data, 'test_keytab_file', test_keytab_file)
+
+    # If the test keytab is just white space, set it to None
+    if test_keytab is not None:
+      test_keytab = test_keytab.strip()
+
+      if len(test_keytab) == 0:
+        test_keytab = None
+
+    krb5_conf_template = get_property_value(krb5_conf_data, 'content', krb5_conf_template)
+    krb5_conf_dir = get_property_value(krb5_conf_data, 'conf_dir', krb5_conf_dir)
+    krb5_conf_file = get_property_value(krb5_conf_data, 'conf_file', krb5_conf_file)
+    krb5_conf_path = krb5_conf_dir + '/' + krb5_conf_file
+
+  # ################################################################################################
+  # Get kdc.conf template data
+  # ################################################################################################
+  kdcdefaults_kdc_ports = "88"
+  kdcdefaults_kdc_tcp_ports = "88"
+
+  kdc_conf_template = None
+
+  kdc_conf_data = get_property_value(configurations, 'kdc-conf')
+
+  if kdc_conf_data is not None:
+    kdcdefaults_kdc_ports = get_property_value(kdc_conf_data, 'kdcdefaults_kdc_ports',
+                                               kdcdefaults_kdc_ports)
+    kdcdefaults_kdc_tcp_ports = get_property_value(kdc_conf_data, 'kdcdefaults_kdc_tcp_ports',
+                                                   kdcdefaults_kdc_tcp_ports)
+
+    kdc_conf_template = get_property_value(kdc_conf_data, 'content', kdc_conf_template)
+    kdc_conf_dir = get_property_value(kdc_conf_data, 'conf_dir', kdc_conf_dir)
+    kdc_conf_file = get_property_value(kdc_conf_data, 'conf_file', kdc_conf_file)
+    kdc_conf_path = kdc_conf_dir + '/' + kdc_conf_file
+
+  # ################################################################################################
+  # Get kadm5.acl template data
+  # ################################################################################################
+
+  kadm5_acl_template = None
+
+  kadm5_acl_data = get_property_value(configurations, 'kadm5-acl')
+
+  if kadm5_acl_data is not None:
+    kadm5_acl_template = get_property_value(kadm5_acl_data, 'content', kadm5_acl_template)
+    kadm5_acl_dir = get_property_value(kadm5_acl_data, 'conf_dir', kadm5_acl_dir)
+    kadm5_acl_file = get_property_value(kadm5_acl_data, 'conf_file', kadm5_acl_file)
+    kadm5_acl_path = kadm5_acl_dir + '/' + kadm5_acl_file
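
Every value in params.py is resolved the same way: a hard-coded default, optionally overridden by a matching key in the krb5-conf, kdc-conf or kadm5-acl configuration dictionaries through get_property_value's third (default) argument. A self-contained sketch of that layering, with made-up sample values:

def get_property_value(dictionary, property_name, null_value=None):
    # Same helper as in utils.py.
    return dictionary[property_name] if property_name in dictionary else null_value

krb5_conf_data = {'realm': 'LAB.LOCAL'}   # hypothetical cluster-supplied configuration
realm = get_property_value(krb5_conf_data, 'realm', 'EXAMPLE.COM')
ticket_lifetime = get_property_value(krb5_conf_data, 'libdefaults_ticket_lifetime', '24h')

print(realm)            # LAB.LOCAL -- key present, default overridden
print(ticket_lifetime)  # 24h -- key absent, built-in default kept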

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/service_check.py
new file mode 100644
index 0000000..73b9c7a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/service_check.py
@@ -0,0 +1,63 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from kerberos_common import *
+from resource_management import *
+
+class KerberosServiceCheck(KerberosScript):
+  def service_check(self, env):
+    import params
+
+    # First attempt to test using the smoke test user, if data is available
+    if ((params.smoke_test_principal is not None) and
+          (params.smoke_test_keytab_file is not None) and
+          os.path.isfile(params.smoke_test_keytab_file)):
+      print "Performing kinit using smoke test user: %s" % params.smoke_test_principal
+      code, out = self.test_kinit({
+        'principal': params.smoke_test_principal,
+        'keytab_file': params.smoke_test_keytab_file
+      })
+      test_performed = True
+
+    # Otherwise, if test credentials are specified, try to test using those
+    elif params.test_principal is not None:
+      print "Performing kinit using test user: %s" % params.test_principal
+      code, out = self.test_kinit({
+        'principal': params.test_principal,
+        'keytab_file': params.test_keytab_file,
+        'keytab': params.test_keytab,
+        'password': params.test_password
+      })
+      test_performed = True
+
+    else:
+      code = 0
+      out = ''
+      test_performed = False
+
+    if test_performed:
+      if code == 0:
+        print "Test executed successfully."
+      else:
+        print "Test failed with error code %d: %s." % (code, out)
+
+if __name__ == "__main__":
+  KerberosServiceCheck().execute()
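
The check hands test_kinit a plain dictionary; which credential is used follows the fallback order in kerberos_common.py: an on-disk keytab_file first, then base64-encoded keytab data, then a password. The three shapes, with placeholder values (illustration only, not taken from the commit):

# Placeholder identities.
on_disk_keytab = {'principal': 'ambari-qa@EXAMPLE.COM',
                  'keytab_file': '/etc/security/keytabs/smokeuser.headless.keytab'}
in_band_keytab = {'principal': 'test@EXAMPLE.COM', 'keytab': '<base64 keytab data>'}
password_only  = {'principal': 'test@EXAMPLE.COM', 'password': 'hypothetical-password'}

# Each is exercised the same way inside the service check:
#   code, out = self.test_kinit(identity)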

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/utils.py
new file mode 100644
index 0000000..79e89e6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/scripts/utils.py
@@ -0,0 +1,69 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+def get_property_value(dictionary, property_name, null_value=None):
+  return dictionary[property_name] if property_name in dictionary else null_value
+
+def get_unstructured_data(dictionary, property_name):
+  prefix = property_name + '/'
+  prefix_len = len(prefix)
+  return dict((k[prefix_len:], v) for k, v in dictionary.iteritems() if k.startswith(prefix))
+
+def split_host_and_port(host):
+  """
+  Splits a string into its host and port components
+
+  :param host: a string matching the following pattern: <host name | ip address>[:port]
+  :return: a dictionary containing 'host' and 'port' entries for the input value
+  """
+
+  if host is None:
+    host_and_port = None
+  else:
+    host_and_port = {}
+    parts = host.split(":")
+
+    if parts is not None:
+      length = len(parts)
+
+      if length > 0:
+        host_and_port['host'] = parts[0]
+
+        if length > 1:
+          host_and_port['port'] = int(parts[1])
+
+  return host_and_port
+
+def set_port(host, port):
+  """
+  Sets the port for a host specification, potentially replacing an existing port declaration
+
+  :param host: a string matching the following pattern: <host name | ip address>[:port]
+  :param port: a string or integer declaring the (new) port
+  :return: a string declaring the new host/port specification
+  """
+  if port is None:
+    return host
+  else:
+    host_and_port = split_host_and_port(host)
+
+    if (host_and_port is not None) and ('host' in host_and_port):
+      return "%s:%s" % (host_and_port['host'], port)
+    else:
+      return host
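
A quick usage sketch for the two host/port helpers (assumes this scripts directory is on the Python path):

from utils import split_host_and_port, set_port

split_host_and_port('kdc.example.com:750')  # {'host': 'kdc.example.com', 'port': 750}
split_host_and_port('kdc.example.com')      # {'host': 'kdc.example.com'}

set_port('kdc.example.com:750', 88)         # 'kdc.example.com:88'
set_port('kdc.example.com', None)           # 'kdc.example.com'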

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/kadm5_acl.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/kadm5_acl.j2 b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/kadm5_acl.j2
new file mode 100644
index 0000000..d82ae23
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/kadm5_acl.j2
@@ -0,0 +1,20 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+*/admin@{{realm}}	*
+
+{# Additional realm declarations should be placed below #}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/kdc_conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/kdc_conf.j2 b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/kdc_conf.j2
new file mode 100644
index 0000000..c067bae
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/kdc_conf.j2
@@ -0,0 +1,30 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+[kdcdefaults]
+  kdc_ports = {{kdcdefaults_kdc_ports}}
+  kdc_tcp_ports = {{kdcdefaults_kdc_tcp_ports}}
+
+[realms]
+  {{realm}} = {
+    acl_file = {{kadm5_acl_path}}
+    dict_file = /usr/share/dict/words
+    admin_keytab = {{kadm5_acl_dir}}/kadm5.keytab
+    supported_enctypes = {{libdefaults_default_tgs_enctypes}}
+  }
+
+{# Additional realm declarations should be placed below #}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2 b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2
new file mode 100644
index 0000000..db1015a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KERBEROS/package/templates/krb5_conf.j2
@@ -0,0 +1,47 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+[libdefaults]
+  renew_lifetime = {{libdefaults_renew_lifetime}}
+  forwardable = {{libdefaults_forwardable}}
+  default_realm = {{realm|upper()}}
+  ticket_lifetime = {{libdefaults_ticket_lifetime}}
+  dns_lookup_realm = {{libdefaults_dns_lookup_realm}}
+  dns_lookup_kdc = {{libdefaults_dns_lookup_kdc}}
+
+{% if domains %}
+[domain_realm]
+{% for domain in domains.split(',') %}
+  {{domain}} = {{realm|upper()}}
+{% endfor %}
+{% endif %}
+
+[logging]
+  default = {{logging_default}}
+{#
+# The following options are unused unless a managed KDC is installed
+  admin_server = {{logging_admin_server}}
+  kdc = {{logging_kdc}}
+#}
+
+[realms]
+  {{realm}} = {
+    admin_server = {{admin_server_host|default(kdc_host, True)}}
+    kdc = {{kdc_host}}
+  }
+
+{# Append additional realm declarations below #}
\ No newline at end of file
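
Outside Ambari's Template/InlineTemplate wrappers, the file also renders with plain Jinja2, which makes it easy to preview what the defaults in params.py produce. A sketch, assuming the jinja2 package is installed and using the default values shown above:

import jinja2

with open('krb5_conf.j2') as f:
    template = jinja2.Template(f.read())

print(template.render(
    libdefaults_renew_lifetime='7d',
    libdefaults_forwardable='true',
    libdefaults_ticket_lifetime='24h',
    libdefaults_dns_lookup_realm='false',
    libdefaults_dns_lookup_kdc='false',
    logging_default='FILE:/var/log/krb5libs.log',
    realm='EXAMPLE.COM',
    domains='',              # empty, so the [domain_realm] section is skipped
    kdc_host='localhost',
    admin_server_host=None,  # |default(kdc_host, True) falls back to kdc_host
))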

http://git-wip-us.apache.org/repos/asf/ambari/blob/c15cd0e0/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/alerts.json b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/alerts.json
new file mode 100644
index 0000000..236875a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2.GlusterFS/services/KNOX/alerts.json
@@ -0,0 +1,26 @@
+{
+  "KNOX": {
+    "service": [],
+    "KNOX_GATEWAY": [
+      {
+        "name": "knox_gateway_process",
+        "label": "Know Gateway Process",
+        "interval": 1,
+        "scope": "HOST",
+        "source": {
+          "type": "PORT",
+          "uri": "{{gateway-site/gateway.port}}",
+          "default_port": 8443,
+          "reporting": {
+            "ok": {
+              "text": "TCP OK - {0:.4f} response on port {1}"
+            },
+            "critical": {
+              "text": "Connection failed: {0} to {1}:{2}"
+            }
+          }
+        }
+      }
+    ]
+  }
+}