Posted to commits@ambari.apache.org by ma...@apache.org on 2014/01/18 00:40:04 UTC

[08/37] AMBARI-4341. Rename 2.0.8 to 2.1.1 in the stack definition. (mahadev)

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hcat_service_check.py
new file mode 100644
index 0000000..5112e99
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hcat_service_check.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+def hcat_service_check():
+    import params
+
+    unique = get_unique_id_and_date()
+    output_file = format("/apps/hive/warehouse/hcatsmoke{unique}")
+    test_cmd = format("fs -test -e {output_file}")
+
+    if params.security_enabled:
+      kinit_cmd = format(
+        "{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
+    else:
+      kinit_cmd = ""
+
+    File('/tmp/hcatSmoke.sh',
+         content=StaticFile("hcatSmoke.sh"),
+         mode=0755
+    )
+
+    prepare_cmd = format("{kinit_cmd}sh /tmp/hcatSmoke.sh hcatsmoke{unique} prepare")
+
+    Execute(prepare_cmd,
+            tries=3,
+            user=params.smokeuser,
+            try_sleep=5,
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
+            logoutput=True)
+
+    ExecuteHadoop(test_cmd,
+                  user=params.hdfs_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir)
+
+    cleanup_cmd = format("{kinit_cmd}sh /tmp/hcatSmoke.sh hcatsmoke{unique} cleanup")
+
+    Execute(cleanup_cmd,
+            tries=3,
+            user=params.smokeuser,
+            try_sleep=5,
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
+            logoutput=True
+    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive.py
new file mode 100644
index 0000000..b37ebb2
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import sys
+
+
+def hive(name=None):
+  import params
+
+  if name == 'metastore' or name == 'hiveserver2':
+    hive_config_dir = params.hive_server_conf_dir
+    config_file_mode = 0600
+    jdbc_connector()
+  else:
+    hive_config_dir = params.hive_conf_dir
+    config_file_mode = 0644
+
+  Directory(hive_config_dir,
+            owner=params.hive_user,
+            group=params.user_group,
+            recursive=True
+  )
+
+  XmlConfig("hive-site.xml",
+            conf_dir=hive_config_dir,
+            configurations=params.config['configurations']['hive-site'],
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=config_file_mode
+  )
+
+  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 "
+               "{jdk_location}{check_db_connection_jar_name} -o {check_db_connection_jar_name}'")
+
+  Execute(cmd,
+          not_if=format("[ -f {check_db_connection_jar_name}]"))
+
+  if name == 'metastore':
+    File(params.start_metastore_path,
+         mode=0755,
+         content=StaticFile('startMetastore.sh')
+    )
+
+  elif name == 'hiveserver2':
+    File(params.start_hiveserver2_path,
+         mode=0755,
+         content=StaticFile('startHiveserver2.sh')
+    )
+
+  if name != "client":
+    crt_directory(params.hive_pid_dir)
+    crt_directory(params.hive_log_dir)
+    crt_directory(params.hive_var_lib)
+
+  File(format("{hive_config_dir}/hive-env.sh"),
+       owner=params.hive_user,
+       group=params.user_group,
+       content=Template('hive-env.sh.j2', conf_dir=hive_config_dir)
+  )
+
+  crt_file(format("{hive_conf_dir}/hive-default.xml.template"))
+  crt_file(format("{hive_conf_dir}/hive-env.sh.template"))
+  crt_file(format("{hive_conf_dir}/hive-exec-log4j.properties.template"))
+  crt_file(format("{hive_conf_dir}/hive-log4j.properties.template"))
+
+
+def crt_directory(name):
+  import params
+
+  Directory(name,
+            recursive=True,
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)
+
+
+def crt_file(name):
+  import params
+
+  File(name,
+       owner=params.hive_user,
+       group=params.user_group
+  )
+
+
+def jdbc_connector():
+  import params
+
+  if params.hive_jdbc_driver == "com.mysql.jdbc.Driver":
+    cmd = format("hive mkdir -p {artifact_dir} ; cp /usr/share/java/{jdbc_jar_name} {target}")
+
+    Execute(cmd,
+            not_if=format("test -f {target}"),
+            creates=params.target,
+            path=["/bin", "usr/bin/"])
+
+  elif params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+    cmd = format(
+      "mkdir -p {artifact_dir} ; curl -kf --retry 10 {driver_curl_source} -o {driver_curl_target} &&  "
+      "cp {driver_curl_target} {target}")
+
+    Execute(cmd,
+            not_if=format("test -f {target}"),
+            path=["/bin", "usr/bin/"])

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_client.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_client.py
new file mode 100644
index 0000000..0a5fb2b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_client.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import sys
+from resource_management import *
+
+from hive import hive
+
+class HiveClient(Script):
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    hive(name='client')
+
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+if __name__ == "__main__":
+  HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_metastore.py
new file mode 100644
index 0000000..c741174
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_metastore.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+
+from hive import hive
+from hive_service import hive_service
+
+class HiveMetastore(Script):
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    hive(name='metastore')
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    hive_service( 'metastore',
+                   action = 'start'
+    )
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    hive_service( 'metastore',
+                   action = 'stop'
+    )
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
+    # Check the Hive Metastore process status via its pid file
+    check_process_status(pid_file)
+
+if __name__ == "__main__":
+  HiveMetastore().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_server.py
new file mode 100644
index 0000000..3ad81a1
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_server.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+
+from hive import hive
+from hive_service import hive_service
+
+class HiveServer(Script):
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    hive(name='hiveserver2')
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    hive_service( 'hiveserver2',
+                  action = 'start'
+    )
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    hive_service( 'hiveserver2',
+                  action = 'stop'
+    )
+
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    pid_file = format("{hive_pid_dir}/{hive_pid}")
+    # Check the HiveServer2 process status via its pid file
+    check_process_status(pid_file)
+
+if __name__ == "__main__":
+  HiveServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_service.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_service.py
new file mode 100644
index 0000000..e8d4e5c
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/hive_service.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+
+def hive_service(
+    name,
+    action='start'):
+
+  import params
+
+  if name == 'metastore':
+    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
+    cmd = format(
+      "env HADOOP_HOME={hadoop_home} JAVA_HOME={java64_home} {start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.log {pid_file} {hive_server_conf_dir}")
+  elif name == 'hiveserver2':
+    pid_file = format("{hive_pid_dir}/{hive_pid}")
+    cmd = format(
+      "env JAVA_HOME={java64_home} {start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.log {pid_file} {hive_server_conf_dir}")
+
+  if action == 'start':
+    demon_cmd = format("{cmd}")
+    no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")
+    Execute(demon_cmd,
+            user=params.hive_user,
+            not_if=no_op_test
+    )
+
+    if params.hive_jdbc_driver == "com.mysql.jdbc.Driver" or params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+      db_connection_check_command = format(
+        "{java64_home}/bin/java -cp {check_db_connection_jar}:/usr/share/java/{jdbc_jar_name} org.apache.ambari.server.DBConnectionVerification {hive_jdbc_connection_url} {hive_metastore_user_name} {hive_metastore_user_passwd} {hive_jdbc_driver}")
+      Execute(db_connection_check_command,
+              path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin')
+
+  elif action == 'stop':
+    demon_cmd = format("kill `cat {pid_file}` >/dev/null 2>&1 && rm -f {pid_file}")
+    Execute(demon_cmd)
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/mysql_server.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/mysql_server.py
new file mode 100644
index 0000000..a45d310
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/mysql_server.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+
+from mysql_service import mysql_service
+
+class MysqlServer(Script):
+
+  if System.get_instance().platform == "suse":
+    daemon_name = 'mysql'
+  else:
+    daemon_name = 'mysqld'
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    mysql_service(daemon_name=self.daemon_name, action='start')
+
+    File(params.mysql_adduser_path,
+         mode=0755,
+         content=StaticFile('addMysqlUser.sh')
+    )
+
+    # Autoescaping
+    cmd = ("bash", "-x", params.mysql_adduser_path, self.daemon_name,
+           params.hive_metastore_user_name, str(params.hive_metastore_user_passwd) , params.mysql_host[0])
+
+    Execute(cmd,
+            tries=3,
+            try_sleep=5,
+            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+            logoutput=True
+    )
+
+    mysql_service(daemon_name=self.daemon_name, action='stop')
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+
+    mysql_service(daemon_name=self.daemon_name, action = 'start')
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    mysql_service(daemon_name=self.daemon_name, action = 'stop')
+
+  def status(self, env):
+    mysql_service(daemon_name=self.daemon_name, action = 'status')
+
+if __name__ == "__main__":
+  MysqlServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/mysql_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/mysql_service.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/mysql_service.py
new file mode 100644
index 0000000..cfb3e08
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/mysql_service.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+
+def mysql_service(daemon_name=None, action='start'):
+  cmd = format('service {daemon_name} {action}')
+
+  if action == 'status':
+    logoutput = False
+  else:
+    logoutput = True
+
+  Execute(cmd,
+          path="/usr/local/bin/:/bin/:/sbin/",
+          tries=1,
+          logoutput=logoutput)
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/params.py
new file mode 100644
index 0000000..0cf89be
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/params.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import status_params
+
+# server configurations
+config = Script.get_config()
+
+hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
+hive_server_conf_dir = "/etc/hive/conf.server"
+hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
+
+hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
+
+#users
+hive_user = config['configurations']['global']['hive_user']
+hive_lib = '/usr/lib/hive/lib/'
+#JDBC driver jar name
+hive_jdbc_driver = default('hive_jdbc_driver', 'com.mysql.jdbc.Driver')
+if hive_jdbc_driver == "com.mysql.jdbc.Driver":
+  jdbc_jar_name = "mysql-connector-java.jar"
+elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+  jdbc_jar_name = "ojdbc6.jar"
+
+check_db_connection_jar_name = "DBConnectionVerification.jar"
+check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
+
+#common
+hive_metastore_port = config['configurations']['global']['hive_metastore_port']
+hive_var_lib = '/var/lib/hive'
+hive_server_host = config['clusterHostInfo']['hive_server_host']
+hive_url = format("jdbc:hive2://{hive_server_host}:10000")
+
+smokeuser = config['configurations']['global']['smokeuser']
+smoke_test_sql = "/tmp/hiveserver2.sql"
+smoke_test_path = "/tmp/hiveserver2Smoke.sh"
+smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
+
+security_enabled = config['configurations']['global']['security_enabled']
+
+kinit_path_local = get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+
+#hive_env
+hive_conf_dir = "/etc/hive/conf"
+hive_dbroot = config['configurations']['global']['hive_dbroot']
+hive_log_dir = config['configurations']['global']['hive_log_dir']
+hive_pid_dir = status_params.hive_pid_dir
+hive_pid = status_params.hive_pid
+
+#hive-site
+hive_database_name = config['configurations']['global']['hive_database_name']
+
+#Starting hiveserver2
+start_hiveserver2_script = 'startHiveserver2.sh'
+
+hadoop_home = '/usr'
+
+##Starting metastore
+start_metastore_script = 'startMetastore.sh'
+hive_metastore_pid = status_params.hive_metastore_pid
+java_share_dir = '/usr/share/java'
+driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+
+hdfs_user =  config['configurations']['global']['hdfs_user']
+user_group = config['configurations']['global']['user_group']
+artifact_dir = "/tmp/HDP-artifacts/"
+
+target = format("{hive_lib}/{jdbc_jar_name}")
+
+jdk_location = config['hostLevelParams']['jdk_location']
+driver_curl_source = format("{jdk_location}/{jdbc_jar_name}")
+
+start_hiveserver2_path = "/tmp/start_hiveserver2_script"
+start_metastore_path = "/tmp/start_metastore_script"
+
+hive_aux_jars_path = config['configurations']['global']['hive_aux_jars_path']
+hadoop_heapsize = config['configurations']['global']['hadoop_heapsize']
+java64_home = config['hostLevelParams']['java_home']
+
+##### MYSQL
+
+db_name = config['configurations']['global']['hive_database_name']
+mysql_user = "mysql"
+mysql_group = 'mysql'
+mysql_host = config['clusterHostInfo']['hive_mysql_host']
+
+mysql_adduser_path = "/tmp/addMysqlUser.sh"
+
+########## HCAT
+
+hcat_conf_dir = '/etc/hcatalog/conf'
+
+metastore_port = 9933
+hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+
+hcat_dbroot = hcat_lib
+
+hcat_user = config['configurations']['global']['hcat_user']
+webhcat_user = config['configurations']['global']['webhcat_user']
+
+hcat_pid_dir = status_params.hcat_pid_dir
+hcat_log_dir = config['configurations']['global']['hcat_log_dir']   #hcat_log_dir
+
+hadoop_conf_dir = '/etc/hadoop/conf'

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/service_check.py
new file mode 100644
index 0000000..111e8a1
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/service_check.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+from hcat_service_check import hcat_service_check
+
+class HiveServiceCheck(Script):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+    if params.security_enabled:
+      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
+      hive_principal_ext = format("principal={hive_metastore_keytab_path}")
+      hive_url_ext = format("{hive_url}/\\;{hive_principal_ext}")
+      smoke_cmd = format("{kinit_cmd} env JAVA_HOME={java64_home} {smoke_test_path} {hive_url_ext} {smoke_test_sql}")
+    else:
+      smoke_cmd = format("env JAVA_HOME={java64_home} {smoke_test_path} {hive_url} {smoke_test_sql}")
+
+    File(params.smoke_test_path,
+         content=StaticFile('hiveserver2Smoke.sh'),
+         mode=0755
+    )
+
+    File(params.smoke_test_sql,
+         content=StaticFile('hiveserver2.sql')
+    )
+
+    Execute(smoke_cmd,
+            tries=3,
+            try_sleep=5,
+            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+            logoutput=True,
+            user=params.smokeuser)
+
+    hcat_service_check()
+
+if __name__ == "__main__":
+  HiveServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/status_params.py
new file mode 100644
index 0000000..7770975
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/scripts/status_params.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+config = Script.get_config()
+
+hive_pid_dir = config['configurations']['global']['hive_pid_dir']
+hive_pid = 'hive-server.pid'
+
+hive_metastore_pid = 'hive.pid'
+
+hcat_pid_dir = config['configurations']['global']['hcat_pid_dir'] #hcat_pid_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/templates/hcat-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/templates/hcat-env.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/templates/hcat-env.sh.j2
new file mode 100644
index 0000000..2a35240
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/templates/hcat-env.sh.j2
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+JAVA_HOME={{java64_home}}
+HCAT_PID_DIR={{hcat_pid_dir}}/
+HCAT_LOG_DIR={{hcat_log_dir}}/
+HCAT_CONF_DIR={{hcat_conf_dir}}
+HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+#DBROOT is the path where the connector jars are downloaded
+DBROOT={{hcat_dbroot}}
+USER={{hcat_user}}
+METASTORE_PORT={{metastore_port}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/templates/hive-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/templates/hive-env.sh.j2 b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/templates/hive-env.sh.j2
new file mode 100644
index 0000000..548262a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/HIVE/package/templates/hive-env.sh.j2
@@ -0,0 +1,55 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set Hive and Hadoop environment variables here. These variables can be used
+# to control the execution of Hive. It should be used by admins to configure
+# the Hive installation (so that users do not have to set environment variables
+# or set command line parameters to get correct behavior).
+#
+# The hive service being invoked (CLI/HWI etc.) is available via the environment
+# variable SERVICE
+
+# Hive Client memory usage can be an issue if a large number of clients
+# are running at the same time. The flags below have been useful in
+# reducing memory usage:
+#
+ if [ "$SERVICE" = "cli" ]; then
+   if [ -z "$DEBUG" ]; then
+     export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit"
+   else
+     export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit"
+   fi
+ fi
+
+# The heap size of the jvm started by hive shell script can be controlled via:
+
+export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
+export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
+
+# Larger heap size may be required when running queries over large number of files or partitions.
+# By default hive shell scripts use a heap size of 256 (MB).  Larger heap size would also be
+# appropriate for hive server (hwi etc).
+
+
+# Set HADOOP_HOME to point to a specific hadoop install directory
+HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+
+# Hive Configuration Directory can be controlled by:
+export HIVE_CONF_DIR={{conf_dir}}
+
+# Folder containing extra libraries required for hive compilation/execution can be controlled by:
+# export HIVE_AUX_JARS_PATH=
+export HIVE_AUX_JARS_PATH={{hive_aux_jars_path}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/metainfo.xml
new file mode 100644
index 0000000..9af461e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/metainfo.xml
@@ -0,0 +1,105 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>NAGIOS</name>
+      <comment>Nagios Monitoring and Alerting system</comment>
+      <version>3.5.0</version>
+      <components>
+        <component>
+            <name>NAGIOS_SERVER</name>
+            <category>MASTER</category>
+            <commandScript>
+              <script>scripts/nagios_server.py</script>
+              <scriptType>PYTHON</scriptType>
+              <timeout>600</timeout>
+            </commandScript>
+        </component>
+      </components>
+      <osSpecifics>
+        <osSpecific>
+          <osType>any</osType>
+          <packages>
+            <package>
+              <type>rpm</type>
+              <name>perl</name>
+            </package>
+            <package>
+              <type>rpm</type>
+              <name>perl-Net-SNMP</name>
+            </package>
+            <package>
+              <type>rpm</type>
+              <name>nagios-plugins-1.4.9</name>
+            </package>
+            <package>
+              <type>rpm</type>
+              <name>nagios-3.5.0-99</name>
+            </package>
+            <package>
+              <type>rpm</type>
+              <name>nagios-www-3.5.0-99</name>
+            </package>
+            <package>
+              <type>rpm</type>
+              <name>nagios-devel-3.5.0-99</name>
+            </package>
+            <package>
+              <type>rpm</type>
+              <name>fping</name>
+            </package>
+            <package>
+              <type>rpm</type>
+              <name>hdp_mon_nagios_addons</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osType>suse</osType>
+          <package>
+            <type>rpm</type>
+            <name>php5-json</name>
+          </package>
+        </osSpecific>
+        <osSpecific>
+          <osType>centos5</osType>
+          <package>
+            <type>rpm</type>
+            <name>php-pecl-json.x86_64</name>
+          </package>
+        </osSpecific>
+        <osSpecific>
+          <osType>redhat5</osType>
+          <package>
+            <type>rpm</type>
+            <name>php-pecl-json.x86_64</name>
+          </package>
+        </osSpecific>
+        <osSpecific>
+          <osType>oraclelinux5</osType>
+          <package>
+            <type>rpm</type>
+            <name>php-pecl-json.x86_64</name>
+          </package>
+        </osSpecific>
+      </osSpecifics>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_aggregate.php
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_aggregate.php b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_aggregate.php
new file mode 100644
index 0000000..f4063fb
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_aggregate.php
@@ -0,0 +1,243 @@
+<?php
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+  $options = getopt ("f:s:n:w:c:t:");
+  if (!array_key_exists('t', $options) || !array_key_exists('f', $options) || !array_key_exists('w', $options)
+      || !array_key_exists('c', $options) || !array_key_exists('s', $options)) {
+    usage();
+    exit(3);
+  }
+  $status_file=$options['f'];
+  $status_code=$options['s'];
+  $type=$options['t'];
+  $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn);
+  $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit);
+  if ($type == "service" && !array_key_exists('n', $options)) {
+    echo "Service description not provided -n option\n";
+    exit(3);
+  }
+  if ($type == "service") {
+    $service_name=$options['n'];
+    /* echo "DESC: " . $service_name . "\n"; */
+  }
+
+  $result = array();
+  $status_file_content = file_get_contents($status_file);
+
+  $counts;
+  if ($type == "service") {
+    $counts=query_alert_count($status_file_content, $service_name, $status_code);
+  } else {
+    $counts=query_host_count($status_file_content, $status_code);
+  }
+
+  if ($counts['total'] == 0) {
+    $percent = 0;
+  } else {
+    $percent = ($counts['actual']/$counts['total'])*100;
+  }
+  if ($percent >= $crit) {
+    echo "CRITICAL: total:<" . $counts['total'] . ">, affected:<" . $counts['actual'] . ">\n";
+    exit (2);
+  }
+  if ($percent >= $warn) {
+    echo "WARNING: total:<" . $counts['total'] . ">, affected:<" . $counts['actual'] . ">\n";
+    exit (1);
+  }
+  echo "OK: total:<" . $counts['total'] . ">, affected:<" . $counts['actual'] . ">\n";
+  exit(0);
+
+
+  # Functions
+  /* print usage */
+  function usage () {
+    echo "Usage: $0 -f <status_file_path> -t type(host/service) -s <status_codes> -n <service description> -w <warn%> -c <crit%>\n";
+  }
+
+  /* Query host count */
+  function query_host_count ($status_file_content, $status_code) {
+    $num_matches = preg_match_all("/hoststatus \{([\S\s]*?)\}/", $status_file_content, $matches, PREG_PATTERN_ORDER);
+    $hostcounts_object = array ();
+    $total_hosts = 0;
+    $hosts = 0;
+    foreach ($matches[0] as $object) {
+      $total_hosts++;
+      if (getParameter($object, "current_state") == $status_code) {
+        $hosts++;
+      }
+    }
+    $hostcounts_object['total'] = $total_hosts;
+    $hostcounts_object['actual'] = $hosts;
+    return $hostcounts_object;
+  }
+
+  /* Query Alert counts */
+  function query_alert_count ($status_file_content, $service_name, $status_code) {
+    $num_matches = preg_match_all("/servicestatus \{([\S\s]*?)\}/", $status_file_content, $matches, PREG_PATTERN_ORDER);
+    $alertcounts_objects = array ();
+    $total_alerts=0;
+    $alerts=0;
+    foreach ($matches[0] as $object) {
+      if (getParameter($object, "service_description") == $service_name) {
+        $total_alerts++;
+        if (getParameter($object, "current_state") >= $status_code) {
+          $alerts++;
+        }
+      }
+    }
+    $alertcounts_objects['total'] = $total_alerts;
+    $alertcounts_objects['actual'] = $alerts;
+    return $alertcounts_objects;
+  }
+
+  function get_service_type($service_description)
+  {
+    $pieces = explode("::", $service_description);
+    switch ($pieces[0]) {
+      case "NAMENODE":
+        $pieces[0] = "HDFS";
+        break;
+      case "JOBTRACKER":
+        $pieces[0] = "MAPREDUCE";
+        break;
+      case "HBASEMASTER":
+        $pieces[0] = "HBASE";
+        break;
+      case "SYSTEM":
+      case "HDFS":
+      case "MAPREDUCE":
+      case "HBASE":
+        break;
+      default:
+        $pieces[0] = "UNKNOWN";
+    }
+    return $pieces[0];
+  }
+
+  function getParameter($object, $key)
+  {
+    $pattern="/\s" . $key . "[\s= ]*([\S, ]*)\n/";
+    $num_mat = preg_match($pattern, $object, $matches);
+    $value = "";
+    if ($num_mat) {
+      $value = $matches[1];
+    }
+    return $value;
+  }
+
+function indent($json) {
+
+    $result      = '';
+    $pos         = 0;
+    $strLen      = strlen($json);
+    $indentStr   = '  ';
+    $newLine     = "\n";
+    $prevChar    = '';
+    $outOfQuotes = true;
+
+    for ($i=0; $i<=$strLen; $i++) {
+
+        // Grab the next character in the string.
+        $char = substr($json, $i, 1);
+
+        // Are we inside a quoted string?
+        if ($char == '"' && $prevChar != '\\') {
+            $outOfQuotes = !$outOfQuotes;
+
+        // If this character is the end of an element,
+        // output a new line and indent the next line.
+        } else if(($char == '}' || $char == ']') && $outOfQuotes) {
+            $result .= $newLine;
+            $pos --;
+            for ($j=0; $j<$pos; $j++) {
+                $result .= $indentStr;
+            }
+        }
+
+        // Add the character to the result string.
+        $result .= $char;
+
+        // If the last character was the beginning of an element,
+        // output a new line and indent the next line.
+        if (($char == ',' || $char == '{' || $char == '[') && $outOfQuotes) {
+            $result .= $newLine;
+            if ($char == '{' || $char == '[') {
+                $pos ++;
+            }
+
+            for ($j = 0; $j < $pos; $j++) {
+                $result .= $indentStr;
+            }
+        }
+
+        $prevChar = $char;
+    }
+
+    return $result;
+}
+
+/* JSON document format */
+/*
+{
+  "programstatus":{
+    "last_command_check":"1327385743"
+  },
+  "hostcounts":{
+    "up_nodes":"",
+    "down_nodes":""
+  },
+  "hoststatus":[
+    {
+      "host_name"="ip-10-242-191-48.ec2.internal",
+      "current_state":"0",
+      "last_hard_state":"0",
+      "plugin_output":"PING OK - Packet loss = 0%, RTA = 0.04 ms",
+      "last_check":"1327385564",
+      "current_attempt":"1",
+      "last_hard_state_change":"1327362079",
+      "last_time_up":"1327385574",
+      "last_time_down":"0",
+      "last_time_unreachable":"0",
+      "is_flapping":"0",
+      "last_check":"1327385574",
+      "servicestatus":[
+      ]
+    }
+  ],
+  "servicestatus":[
+    {
+      "service_type":"HDFS",  {HBASE, MAPREDUCE, HIVE, ZOOKEEPER}
+      "service_description":"HDFS Current Load",
+      "host_name"="ip-10-242-191-48.ec2.internal",
+      "current_attempt":"1",
+      "current_state":"0",
+      "plugin_output":"PING OK - Packet loss = 0%, RTA = 0.04 ms",
+      "last_hard_state_change":"1327362079",
+      "last_time_ok":"1327385479",
+      "last_time_warning":"0",
+      "last_time_unknown":"0",
+      "last_time_critical":"0",
+      "last_check":"1327385574",
+      "is_flapping":"0"
+    }
+  ]
+}
+*/
+
+?>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_cpu.pl
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_cpu.pl b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_cpu.pl
new file mode 100644
index 0000000..a5680f7
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_cpu.pl
@@ -0,0 +1,114 @@
+#!/usr/bin/perl -w 
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+use strict;
+use Net::SNMP;
+use Getopt::Long;
+
+# Variable
+my $base_proc = "1.3.6.1.2.1.25.3.3.1";   
+my $proc_load = "1.3.6.1.2.1.25.3.3.1.2"; 
+my $o_host = 	undef;
+my $o_community = undef;
+my $o_warn=	undef;
+my $o_crit=	undef;
+my $o_timeout = 15;
+my $o_port = 161;
+
+sub Usage {
+    print "Usage: $0 -H <host> -C <snmp_community> -w <warn level> -c <crit level>\n";
+}
+
+Getopt::Long::Configure ("bundling");
+GetOptions(
+  'H:s'   => \$o_host,	
+  'C:s'   => \$o_community,	
+  'c:s'   => \$o_crit,        
+  'w:s'   => \$o_warn
+          );
+if (!defined $o_host || !defined $o_community || !defined $o_crit || !defined $o_warn) {
+  Usage();
+  exit 3;
+}
+$o_warn =~ s/\%//g; 
+$o_crit =~ s/\%//g;
+alarm ($o_timeout);
+$SIG{'ALRM'} = sub {
+ print "Unable to contact host: $o_host\n";
+ exit 3;
+};
+
+# Connect to host
+my ($session,$error);
+($session, $error) = Net::SNMP->session(
+		-hostname  => $o_host,
+		-community => $o_community,
+		-port      => $o_port,
+		-timeout   => $o_timeout
+	  );
+if (!defined($session)) {
+   printf("Error opening session: %s.\n", $error);
+   exit 3;
+}
+
+my $exit_val=undef;
+my $resultat =  (Net::SNMP->VERSION < 4) ?
+	  $session->get_table($base_proc)
+	: $session->get_table(Baseoid => $base_proc);
+
+if (!defined($resultat)) {
+   printf("ERROR: Description table : %s.\n", $session->error);
+   $session->close;
+   exit 3;
+}
+
+$session->close;
+
+my ($cpu_used,$ncpu)=(0,0);
+foreach my $key ( keys %$resultat) {
+  if ($key =~ /$proc_load/) {
+    $cpu_used += $$resultat{$key};
+    $ncpu++;
+  }
+}
+
+if ($ncpu==0) {
+  print "Can't find CPU usage information : UNKNOWN\n";
+  exit 3;
+}
+
+$cpu_used /= $ncpu;
+
+print "$ncpu CPU, ", $ncpu==1 ? "load" : "average load";
+printf(" %.1f%%",$cpu_used);
+$exit_val=0;
+
+if ($cpu_used > $o_crit) {
+ print " > $o_crit% : CRITICAL\n";
+ $exit_val=2;
+} else {
+  if ($cpu_used > $o_warn) {
+   print " > $o_warn% : WARNING\n";
+   $exit_val=1;
+  }
+}
+print " < $o_warn% : OK\n" if ($exit_val eq 0);
+exit $exit_val;

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_datanode_storage.php
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_datanode_storage.php b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_datanode_storage.php
new file mode 100644
index 0000000..dee22b4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_datanode_storage.php
@@ -0,0 +1,100 @@
+<?php
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This plugin makes a call to the master node, gets the jmx-json document,
+ * and checks the storage capacity remaining on the local datanode storage.
+ */
+
+  include "hdp_nagios_init.php";
+
+  $options = getopt ("h:p:w:c:e:k:r:t:s:");
+  if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('w', $options) 
+      || !array_key_exists('c', $options)) {
+    usage();
+    exit(3);
+  }
+
+  $host=$options['h'];
+  $port=$options['p'];
+  $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn);
+  $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit);
+  $keytab_path=$options['k'];
+  $principal_name=$options['r'];
+  $kinit_path_local=$options['t'];
+  $security_enabled=$options['s'];
+  $ssl_enabled=$options['e'];
+
+  /* Kinit if security enabled */
+  $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name);
+  $retcode = $status[0];
+  $output = $status[1];
+  
+  if ($output != 0) {
+    echo "CRITICAL: Error doing kinit for nagios. $output";
+    exit (2);
+  }
+
+  $protocol = ($ssl_enabled == "true" ? "https" : "http");
+
+  /* Get the json document */
+  $ch = curl_init();
+  $username = rtrim(`id -un`, "\n");
+  curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=Hadoop:service=DataNode,name=FSDatasetState-*",
+                                CURLOPT_RETURNTRANSFER => true,
+                                CURLOPT_HTTPAUTH => CURLAUTH_ANY,
+                                CURLOPT_USERPWD => "$username:",
+                                CURLOPT_SSL_VERIFYPEER => FALSE ));
+  $json_string = curl_exec($ch);
+  $info = curl_getinfo($ch);
+  if (intval($info['http_code']) == 401){
+    logout();
+    $json_string = curl_exec($ch);
+  }
+  $info = curl_getinfo($ch);
+  curl_close($ch);
+  $json_array = json_decode($json_string, true);
+  $object = $json_array['beans'][0];
+  $cap_remain = $object['Remaining']; /* Total capacity - any external files created in data directories by non-hadoop app */
+  $cap_total = $object['Capacity']; /* Capacity used by all data partitions minus space reserved for M/R */
+  if (count($object) == 0) {
+    echo "CRITICAL: Data inaccessible, Status code = ". $info['http_code'] ."\n";
+    exit(2);
+  }  
+  $percent_full = ($cap_total - $cap_remain)/$cap_total * 100;
+
+  $out_msg = "Capacity:[" . $cap_total . 
+             "], Remaining Capacity:[" . $cap_remain . 
+             "], percent_full:[" . $percent_full  . "]";
+  
+  if ($percent_full > $crit) {
+    echo "CRITICAL: " . $out_msg . "\n";
+    exit (2);
+  }
+  if ($percent_full > $warn) {
+    echo "WARNING: " . $out_msg . "\n";
+    exit (1);
+  }
+  echo "OK: " . $out_msg . "\n";
+  exit(0);
+
+  /* print usage */
+  function usage () {
+    echo "Usage: $0 -h <host> -p port -w <warn%> -c <crit%> -k keytab path -r principal name -t kinit path -s security enabled -e ssl enabled\n";
+  }
+?>

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hdfs_blocks.php
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hdfs_blocks.php b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hdfs_blocks.php
new file mode 100644
index 0000000..19347b4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hdfs_blocks.php
@@ -0,0 +1,115 @@
+<?php
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This plugin makes a call to the master node, gets the jmx-json document,
+ * and checks whether the corrupt or missing block % is > threshold.
+ * check_jmx -h hostaddress -p port -w 1% -c 1%
+ */
+
+  include "hdp_nagios_init.php";
+
+  $options = getopt ("h:p:w:c:s:e:k:r:t:u:");
+  if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('w', $options)
+      || !array_key_exists('c', $options) || !array_key_exists('s', $options)) {
+    usage();
+    exit(3);
+  }
+
+  $hosts=$options['h'];
+  $port=$options['p'];
+  $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn);
+  $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit);
+  $nn_jmx_property=$options['s'];
+  $keytab_path=$options['k'];
+  $principal_name=$options['r'];
+  $kinit_path_local=$options['t'];
+  $security_enabled=$options['u'];
+  $ssl_enabled=$options['e'];
+
+  /* Kinit if security enabled */
+  $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name);
+  $retcode = $status[0];
+  $output = $status[1];
+  
+  if ($retcode != 0) {
+    echo "CRITICAL: Error doing kinit for nagios. $output";
+    exit (2);
+  }
+
+  $protocol = ($ssl_enabled == "true" ? "https" : "http");
+
+
+  foreach (preg_split('/,/', $hosts) as $host) {
+    /* Get the json document */
+
+    $ch = curl_init();
+    $username = rtrim(`id -un`, "\n");
+    curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=Hadoop:service=NameNode,name=".$nn_jmx_property,
+                                  CURLOPT_RETURNTRANSFER => true,
+                                  CURLOPT_HTTPAUTH => CURLAUTH_ANY,
+                                  CURLOPT_USERPWD => "$username:",
+                                  CURLOPT_SSL_VERIFYPEER => FALSE ));
+    $json_string = curl_exec($ch);
+    $info = curl_getinfo($ch);
+    if (intval($info['http_code']) == 401){
+      logout();
+      $json_string = curl_exec($ch);
+    }
+    $info = curl_getinfo($ch);
+    curl_close($ch);
+    $json_array = json_decode($json_string, true);
+    $m_percent = 0;
+    $c_percent = 0;
+    $object = $json_array['beans'][0];
+    if (count($object) == 0) {
+      echo "CRITICAL: Data inaccessible, Status code = ". $info['http_code'] ."\n";
+      exit(2);
+    }
+    $missing_blocks = $object['MissingBlocks'];
+    $corrupt_blocks = $object['CorruptBlocks'];
+    $total_blocks = $object['BlocksTotal'];
+    if($total_blocks == 0) {
+      $m_percent = 0;
+      $c_percent = 0;
+    } else {
+      $m_percent = ($missing_blocks/$total_blocks)*100;
+      $c_percent = ($corrupt_blocks/$total_blocks)*100;
+      break;
+    }
+  }
+  $out_msg = "corrupt_blocks:<" . $corrupt_blocks .
+             ">, missing_blocks:<" . $missing_blocks .
+             ">, total_blocks:<" . $total_blocks . ">";
+
+  if ($m_percent > $crit || $c_percent > $crit) {
+    echo "CRITICAL: " . $out_msg . "\n";
+    exit (2);
+  }
+  if ($m_percent > $warn || $c_percent > $warn) {
+    echo "WARNING: " . $out_msg . "\n";
+    exit (1);
+  }
+  echo "OK: " . $out_msg . "\n";
+  exit(0);
+
+  /* print usage */
+  function usage () {
+    echo "Usage: $0 -h <host> -p port -w <warn%> -c <crit%> -s <namenode bean name> -k keytab path -r principal name -t kinit path -s security enabled -e ssl enabled\n";
+  }
+?>
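
For reference, a hypothetical invocation of this plugin; the hosts, port, and bean name below are placeholders, and on a secured cluster -u true together with -k/-r/-t would be passed as well:

  php ./check_hdfs_blocks.php -h nn1.example.com,nn2.example.com -p 50070 \
      -w 0% -c 1% -s FSNamesystemMetrics -u false -e false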

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hdfs_capacity.php
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hdfs_capacity.php b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hdfs_capacity.php
new file mode 100644
index 0000000..af72723
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hdfs_capacity.php
@@ -0,0 +1,109 @@
+<?php
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This plugin makes a call to the master node, gets the jmx-json document,
+ * and checks whether the % HDFS capacity used is >= the warn and critical limits.
+ * check_jmx -h hostaddress -p port -w 1 -c 1
+ */
+
+  include "hdp_nagios_init.php";
+
+  $options = getopt ("h:p:w:c:e:k:r:t:s:");
+  if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('w', $options)
+      || !array_key_exists('c', $options)) {
+    usage();
+    exit(3);
+  }
+
+  $hosts=$options['h'];
+  $port=$options['p'];
+  $warn=$options['w']; $warn = preg_replace('/%$/', '', $warn);
+  $crit=$options['c']; $crit = preg_replace('/%$/', '', $crit);
+  $keytab_path=$options['k'];
+  $principal_name=$options['r'];
+  $kinit_path_local=$options['t'];
+  $security_enabled=$options['s'];
+  $ssl_enabled=$options['e'];
+
+  /* Kinit if security enabled */
+  $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name);
+  $retcode = $status[0];
+  $output = $status[1];
+  
+  if ($retcode != 0) {
+    echo "CRITICAL: Error doing kinit for nagios. $output";
+    exit (2);
+  }
+
+  $protocol = ($ssl_enabled == "true" ? "https" : "http");
+
+
+  foreach (preg_split('/,/', $hosts) as $host) {
+    /* Get the json document */
+    $ch = curl_init();
+    $username = rtrim(`id -un`, "\n");
+    curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=Hadoop:service=NameNode,name=FSNamesystemState",
+                                  CURLOPT_RETURNTRANSFER => true,
+                                  CURLOPT_HTTPAUTH => CURLAUTH_ANY,
+                                  CURLOPT_USERPWD => "$username:",
+                                  CURLOPT_SSL_VERIFYPEER => FALSE ));
+    $json_string = curl_exec($ch);
+    $info = curl_getinfo($ch);
+    if (intval($info['http_code']) == 401){
+      logout();
+      $json_string = curl_exec($ch);
+    }
+    $info = curl_getinfo($ch);
+    curl_close($ch);
+    $json_array = json_decode($json_string, true);
+    $percent = 0;
+    $object = $json_array['beans'][0];
+    if (count($object) == 0) {
+      echo "CRITICAL: Data inaccessible, Status code = ". $info['http_code'] ."\n";
+      exit(2);
+    }
+    $CapacityUsed = $object['CapacityUsed'];
+    $CapacityRemaining = $object['CapacityRemaining'];
+    $CapacityTotal = $CapacityUsed + $CapacityRemaining;
+    if($CapacityTotal == 0) {
+      $percent = 0;
+    } else {
+      $percent = ($CapacityUsed/$CapacityTotal)*100;
+      break;
+    }
+  }
+  $out_msg = "DFSUsedGB:<" . round ($CapacityUsed/(1024*1024*1024),1) .
+             ">, DFSTotalGB:<" . round($CapacityTotal/(1024*1024*1024),1) . ">";
+
+  if ($percent >= $crit) {
+    echo "CRITICAL: " . $out_msg . "\n";
+    exit (2);
+  }
+  if ($percent >= $warn) {
+    echo "WARNING: " . $out_msg . "\n";
+    exit (1);
+  }
+  echo "OK: " . $out_msg . "\n";
+  exit(0);
+
+  /* print usage */
+  function usage () {
+    echo "Usage: $0 -h <host> -p port -w <warn%> -c <crit%> -k keytab path -r principal name -t kinit path -s security enabled -e ssl enabled\n";
+  }
+?>
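
For reference, a hypothetical invocation (host and port are placeholders; -s true plus -k/-r/-t would be added on a secured cluster):

  php ./check_hdfs_capacity.php -h nn1.example.com -p 50070 -w 80% -c 90% -s false -e false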

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hive_metastore_status.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hive_metastore_status.sh b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hive_metastore_status.sh
new file mode 100644
index 0000000..640c077
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hive_metastore_status.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# The URI is of the form thrift://<hostname>:<port>
+HOST=$1
+PORT=$2
+JAVA_HOME=$3
+SEC_ENABLED=$4
+if [[ "$SEC_ENABLED" == "true" ]]; then
+  NAGIOS_KEYTAB=$5
+  NAGIOS_USER=$6
+  KINIT_PATH=$7
+  out1=`${KINIT_PATH} -kt ${NAGIOS_KEYTAB} ${NAGIOS_USER} 2>&1`
+  if [[ "$?" -ne 0 ]]; then
+    echo "CRITICAL: Error doing kinit for nagios [$out1]";
+    exit 2;
+  fi
+fi
+HCAT_URL=-Dhive.metastore.uris="thrift://$HOST:$PORT"
+export JAVA_HOME=$JAVA_HOME
+out=`hcat $HCAT_URL -e "show databases" 2>&1`
+if [[ "$?" -ne 0 ]]; then
+  echo "CRITICAL: Error accessing Hive Metastore status [$out]";
+  exit 2;
+fi
+echo "OK: Hive Metastore status OK";
+exit 0;
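
For reference, hypothetical invocations of this check (host, port, JAVA_HOME, and keytab values are placeholders):

  ./check_hive_metastore_status.sh metastore1.example.com 9083 /usr/jdk64/jdk1.7.0_45 false
  ./check_hive_metastore_status.sh metastore1.example.com 9083 /usr/jdk64/jdk1.7.0_45 true \
      /etc/security/keytabs/nagios.service.keytab nagios /usr/bin/kinit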

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hue_status.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hue_status.sh b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hue_status.sh
new file mode 100644
index 0000000..076d9b3
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_hue_status.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+status=`/etc/init.d/hue status 2>&1`
+
+if [[ "$?" -ne 0 ]]; then
+  echo "WARNING: Hue is stopped";
+  exit 1;
+fi
+
+echo "OK: Hue is running";
+exit 0;

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_mapred_local_dir_used.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_mapred_local_dir_used.sh b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_mapred_local_dir_used.sh
new file mode 100644
index 0000000..15c85eb
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_mapred_local_dir_used.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+MAPRED_LOCAL_DIRS=$1
+CRITICAL=`echo $2 | cut -d % -f 1`
+IFS=","
+for mapred_dir in $MAPRED_LOCAL_DIRS
+do
+  percent=`df -hl $mapred_dir | awk '{percent=$5;} END{print percent}' | cut -d % -f 1`
+  if [ $percent -ge $CRITICAL ]; then
+    echo "CRITICAL: MapReduce local dir is full."
+    exit 2
+  fi
+done
+echo "OK: MapReduce local dir space is available."
+exit 0
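
For reference, a hypothetical invocation (directories and threshold are placeholders); the script exits CRITICAL as soon as any one of the comma-separated mapred.local.dir mounts is at or above the given usage percentage:

  ./check_mapred_local_dir_used.sh /hadoop/mapred,/grid/0/hadoop/mapred 85%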

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_name_dir_status.php
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_name_dir_status.php b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_name_dir_status.php
new file mode 100644
index 0000000..186166d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_name_dir_status.php
@@ -0,0 +1,93 @@
+<?php
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This plugin makes a call to the NameNode, gets the jmx-json document,
+ * and checks NameDirStatuses for any offline (failed) directories.
+ * check_jmx -h hostaddress -p port -k keytab path -r principal name -t kinit path -s security enabled
+ */
+ 
+  include "hdp_nagios_init.php";
+
+  $options = getopt("h:p:e:k:r:t:s:");
+  //Check only for mandatory options
+  if (!array_key_exists('h', $options) || !array_key_exists('p', $options)) {
+    usage();
+    exit(3);
+  }
+
+  $host=$options['h'];
+  $port=$options['p'];
+  $keytab_path=$options['k'];
+  $principal_name=$options['r'];
+  $kinit_path_local=$options['t'];
+  $security_enabled=$options['s'];
+  $ssl_enabled=$options['e'];
+  
+  /* Kinit if security enabled */
+  $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name);
+  $retcode = $status[0];
+  $output = $status[1];
+  
+  if ($retcode != 0) {
+    echo "CRITICAL: Error doing kinit for nagios. $output";
+    exit (2);
+  }
+
+  $protocol = ($ssl_enabled == "true" ? "https" : "http");
+
+  /* Get the json document */
+  $ch = curl_init();
+  $username = rtrim(`id -un`, "\n");
+  curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=Hadoop:service=NameNode,name=NameNodeInfo",
+                                CURLOPT_RETURNTRANSFER => true,
+                                CURLOPT_HTTPAUTH => CURLAUTH_ANY,
+                                CURLOPT_USERPWD => "$username:",
+                                CURLOPT_SSL_VERIFYPEER => FALSE ));
+  $json_string = curl_exec($ch);
+  $info = curl_getinfo($ch);
+  if (intval($info['http_code']) == 401){
+    logout();
+    $json_string = curl_exec($ch);
+  }
+  $info = curl_getinfo($ch);
+  curl_close($ch);
+  $json_array = json_decode($json_string, true);
+  $object = $json_array['beans'][0];
+  if ($object['NameDirStatuses'] == "") {
+    echo "WARNING: NameNode directory status not available via ".$protocol."://".$host.":".$port."/jmx url, code " . $info['http_code'] ."\n";
+    exit(1);
+  }
+  $NameDirStatuses = json_decode($object['NameDirStatuses'], true);
+  $failed_dir_count = count($NameDirStatuses['failed']);
+  $out_msg = "CRITICAL: Offline NameNode directories: ";
+  if ($failed_dir_count > 0) {
+    foreach ($NameDirStatuses['failed'] as $key => $value) {
+      $out_msg = $out_msg . $key . ":" . $value . ", ";
+    }
+    echo $out_msg . "\n";
+    exit (2);
+  }
+  echo "OK: All NameNode directories are active" . "\n";
+  exit(0);
+
+  /* print usage */
+  function usage () {
+    echo "Usage: $0 -h <host> -p port -k keytab path -r principal name -t kinit path -s security enabled -e ssl enabled";
+  }
+?>
\ No newline at end of file
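
For reference, a hypothetical invocation (host and port are placeholders; -s true plus -k/-r/-t would be passed on a secured cluster):

  php ./check_name_dir_status.php -h nn1.example.com -p 50070 -s false -e false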

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_namenodes_ha.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_namenodes_ha.sh b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_namenodes_ha.sh
new file mode 100644
index 0000000..50b075a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_namenodes_ha.sh
@@ -0,0 +1,82 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+IFS=',' read -a namenodes <<< "$1"
+port=$2
+totalNN=${#namenodes[@]}
+activeNN=()
+standbyNN=()
+unavailableNN=()
+
+for nn in "${namenodes[@]}"
+do
+  status=$(curl -m 5 -s http://$nn:$port/jmx?qry=Hadoop:service=NameNode,name=FSNamesystem | grep -i "tag.HAState" | grep -o -E "standby|active")
+  if [ "$status" == "active" ]; then
+    activeNN[${#activeNN[*]}]="$nn"
+  elif [ "$status" == "standby" ]; then
+    standbyNN[${#standbyNN[*]}]="$nn"
+  elif [ "$status" == "" ]; then
+    unavailableNN[${#unavailableNN[*]}]="$nn"
+  fi
+done
+
+message=""
+critical=false
+
+if [ ${#activeNN[@]} -gt 1 ]; then
+  critical=true
+  message=$message" Only one NN can have HAState=active;"
+elif [ ${#activeNN[@]} == 0 ]; then
+  critical=true
+  message=$message" No Active NN available;"
+elif [ ${#standbyNN[@]} == 0 ]; then
+  critical=true
+  message=$message" No Standby NN available;"
+fi
+
+NNstats=" Active<"
+for nn in "${activeNN[@]}"
+do
+  NNstats="$NNstats$nn;"
+done
+NNstats=${NNstats%\;}
+NNstats=$NNstats">, Standby<"
+for nn in "${standbyNN[@]}"
+do
+  NNstats="$NNstats$nn;"
+done
+NNstats=${NNstats%\;}
+NNstats=$NNstats">, Unavailable<"
+for nn in "${unavailableNN[@]}"
+do
+  NNstats="$NNstats$nn;"
+done
+NNstats=${NNstats%\;}
+NNstats=$NNstats">"
+
+if [ $critical == false ]; then
+  echo "OK: NameNode HA healthy;"$NNstats
+  exit 0
+fi
+
+echo "CRITICAL:"$message$NNstats
+exit 2
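
For reference, a hypothetical invocation (host names and port are placeholders); with one active and one standby NameNode the script should print something like the OK line shown in the comment below:

  ./check_namenodes_ha.sh nn1.example.com,nn2.example.com 50070
  # OK: NameNode HA healthy; Active<nn1.example.com>, Standby<nn2.example.com>, Unavailable<>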

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_nodemanager_health.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_nodemanager_health.sh b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_nodemanager_health.sh
new file mode 100644
index 0000000..020b41d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_nodemanager_health.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+HOST=$1
+PORT=$2
+NODEMANAGER_URL="http://$HOST:$PORT/ws/v1/node/info"
+SEC_ENABLED=$3
+export PATH="/usr/bin:$PATH"
+if [[ "$SEC_ENABLED" == "true" ]]; then
+  NAGIOS_KEYTAB=$4
+  NAGIOS_USER=$5
+  KINIT_PATH=$6
+  out1=`${KINIT_PATH} -kt ${NAGIOS_KEYTAB} ${NAGIOS_USER} 2>&1`
+  if [[ "$?" -ne 0 ]]; then
+    echo "CRITICAL: Error doing kinit for nagios [$out1]";
+    exit 2;
+  fi
+fi
+
+RESPONSE=`curl --negotiate -u : -s $NODEMANAGER_URL`
+if [[ "$RESPONSE" == *'"nodeHealthy":true'* ]]; then 
+  echo "OK: NodeManager healthy";
+  exit 0;
+fi
+echo "CRITICAL: NodeManager unhealthy";
+exit 2;
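
For reference, a hypothetical invocation (host and port are placeholders; keytab, principal, and kinit path are appended when security is enabled):

  ./check_nodemanager_health.sh nm1.example.com 8042 false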

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_oozie_status.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_oozie_status.sh b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_oozie_status.sh
new file mode 100644
index 0000000..820ee99
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_oozie_status.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# OOZIE_URL is of the form http://<hostname>:<port>/oozie
+HOST=`echo $1 | tr '[:upper:]' '[:lower:]'`
+PORT=$2
+JAVA_HOME=$3
+SEC_ENABLED=$4
+if [[ "$SEC_ENABLED" == "true" ]]; then
+  NAGIOS_KEYTAB=$5
+  NAGIOS_USER=$6
+  KINIT_PATH=$7
+  out1=`${KINIT_PATH} -kt ${NAGIOS_KEYTAB} ${NAGIOS_USER} 2>&1`
+  if [[ "$?" -ne 0 ]]; then
+    echo "CRITICAL: Error doing kinit for nagios [$out1]";
+    exit 2;
+  fi
+fi
+OOZIE_URL="http://$HOST:$PORT/oozie"
+export JAVA_HOME=$JAVA_HOME
+out=`oozie admin -oozie ${OOZIE_URL} -status 2>&1`
+if [[ "$?" -ne 0 ]]; then 
+  echo "CRITICAL: Error accessing Oozie Server status [$out]";
+  exit 2;
+fi
+echo "OK: Oozie Server status [$out]";
+exit 0;
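
For reference, a hypothetical invocation (host, port, and JAVA_HOME are placeholders; keytab, principal, and kinit path follow when security is enabled):

  ./check_oozie_status.sh oozie1.example.com 11000 /usr/jdk64/jdk1.7.0_45 false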

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_rpcq_latency.php
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_rpcq_latency.php b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_rpcq_latency.php
new file mode 100644
index 0000000..463f69b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_rpcq_latency.php
@@ -0,0 +1,104 @@
+<?php
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This plugin makes a call to the master node, gets the jmx-json document,
+ * and checks the RPC wait time in the queue, RpcQueueTime_avg_time.
+ * check_rpcq_latency -h hostaddress -p port -n ServiceName -w 1 -c 1
+ * Warning and Critical values are in seconds
+ * Service Name = JobTracker, NameNode, JobHistoryServer
+ */
+
+  include "hdp_nagios_init.php";
+
+  $options = getopt ("h:p:w:c:n:e:k:r:t:s:");
+  if (!array_key_exists('h', $options) || !array_key_exists('p', $options) || !array_key_exists('w', $options)
+      || !array_key_exists('c', $options) || !array_key_exists('n', $options)) {
+    usage();
+    exit(3);
+  }
+
+  $host=$options['h'];
+  $port=$options['p'];
+  $master=$options['n'];
+  $warn=$options['w'];
+  $crit=$options['c'];
+  $keytab_path=$options['k'];
+  $principal_name=$options['r'];
+  $kinit_path_local=$options['t'];
+  $security_enabled=$options['s'];
+  $ssl_enabled=$options['e'];
+
+  /* Kinit if security enabled */
+  $status = kinit_if_needed($security_enabled, $kinit_path_local, $keytab_path, $principal_name);
+  $retcode = $status[0];
+  $output = $status[1];
+  
+  if ($retcode != 0) {
+    echo "CRITICAL: Error doing kinit for nagios. $output";
+    exit (2);
+  }
+
+  $protocol = ($ssl_enabled == "true" ? "https" : "http");
+
+
+  /* Get the json document */
+  $ch = curl_init();
+  $username = rtrim(`id -un`, "\n");
+  curl_setopt_array($ch, array( CURLOPT_URL => $protocol."://".$host.":".$port."/jmx?qry=Hadoop:service=".$master.",name=RpcActivityForPort*",
+                                CURLOPT_RETURNTRANSFER => true,
+                                CURLOPT_HTTPAUTH => CURLAUTH_ANY,
+                                CURLOPT_USERPWD => "$username:",
+                                CURLOPT_SSL_VERIFYPEER => FALSE ));
+  $json_string = curl_exec($ch);
+  $info = curl_getinfo($ch);
+  if (intval($info['http_code']) == 401){
+    logout();
+    $json_string = curl_exec($ch);
+  }
+  $info = curl_getinfo($ch);
+  curl_close($ch);
+  $json_array = json_decode($json_string, true);
+  $object = $json_array['beans'][0];
+  if (count($object) == 0) {
+    echo "CRITICAL: Data inaccessible, Status code = ". $info['http_code'] ."\n";
+    exit(2);
+  } 
+  $RpcQueueTime_avg_time = round($object['RpcQueueTime_avg_time'], 2); 
+  $RpcProcessingTime_avg_time = round($object['RpcProcessingTime_avg_time'], 2);
+
+  $out_msg = "RpcQueueTime_avg_time:<" . $RpcQueueTime_avg_time .
+             "> Secs, RpcProcessingTime_avg_time:<" . $RpcProcessingTime_avg_time .
+             "> Secs";
+
+  if ($RpcQueueTime_avg_time >= $crit) {
+    echo "CRITICAL: " . $out_msg . "\n";
+    exit (2);
+  }
+  if ($RpcQueueTime_avg_time >= $warn) {
+    echo "WARNING: " . $out_msg . "\n";
+    exit (1);
+  }
+  echo "OK: " . $out_msg . "\n";
+  exit(0);
+
+  /* print usage */
+  function usage () {
+    echo "Usage: $0 -h <host> -p port -n <JobTracker/NameNode/JobHistoryServer> -w <warn_in_sec> -c <crit_in_sec> -k keytab path -r principal name -t kinit path -s security enabled -e ssl enabled\n";
+  }
+?>
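
For reference, a hypothetical invocation (host and port are placeholders; the port is the service's HTTP/JMX port, and -s true plus -k/-r/-t would be added on a secured cluster):

  php ./check_rpcq_latency.php -h nn1.example.com -p 50070 -n NameNode -w 1 -c 3 -s false -e false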

http://git-wip-us.apache.org/repos/asf/ambari/blob/ae534ed3/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_templeton_status.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_templeton_status.sh b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_templeton_status.sh
new file mode 100644
index 0000000..7fbc4c4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/NAGIOS/package/files/check_templeton_status.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+# out='{"status":"ok","version":"v1"}<status_code:200>'
+HOST=$1
+PORT=$2
+VERSION=$3
+SEC_ENABLED=$4
+if [[ "$SEC_ENABLED" == "true" ]]; then 
+  NAGIOS_KEYTAB=$5
+  NAGIOS_USER=$6
+  KINIT_PATH=$7
+  out1=`${KINIT_PATH} -kt ${NAGIOS_KEYTAB} ${NAGIOS_USER} 2>&1`
+  if [[ "$?" -ne 0 ]]; then
+    echo "CRITICAL: Error doing kinit for nagios [$out1]";
+    exit 2;
+  fi
+fi
+regex="^.*\"status\":\"ok\".*<status_code:200>$"
+out=`curl --negotiate -u : -s -w '<status_code:%{http_code}>' http://$HOST:$PORT/templeton/$VERSION/status 2>&1`
+if [[ $out =~ $regex ]]; then
+  out=`echo "$out" | sed -e 's/{/[/g' | sed -e 's/}/]/g'` 
+  echo "OK: WebHCat Server status [$out]";
+  exit 0;
+fi
+echo "CRITICAL: Error accessing WebHCat Server, status [$out]";
+exit 2;
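
For reference, a hypothetical invocation (host and port are placeholders; keytab, principal, and kinit path are appended when security is enabled):

  ./check_templeton_status.sh webhcat1.example.com 50111 v1 false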