Posted to commits@ambari.apache.org by sm...@apache.org on 2015/12/12 17:30:01 UTC

ambari git commit: AMBARI-14350. AMS is broken after redeploy cluster (Aravindan Vijayan via smohanty)

Repository: ambari
Updated Branches:
  refs/heads/trunk 6944d61f0 -> 82f449dd1


AMBARI-14350. AMS is broken after redeploy cluster (Aravindan Vijayan via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/82f449dd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/82f449dd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/82f449dd

Branch: refs/heads/trunk
Commit: 82f449dd11dc8880c5a9e7fffa5456bdbe83c808
Parents: 6944d61
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Sat Dec 12 08:29:49 2015 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sat Dec 12 08:29:49 2015 -0800

----------------------------------------------------------------------
 .../AMBARI_METRICS/0.1.0/package/scripts/ams.py |   8 +
 .../0.1.0/package/scripts/hbase.py              |  12 +
 .../0.1.0/package/scripts/hbase.py.orig         | 257 ++++++++++++++
 .../AMBARI_METRICS/test_metrics_collector.py    |  22 ++
 .../test_metrics_collector.py.orig              | 338 +++++++++++++++++++
 5 files changed, 637 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/82f449dd/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
index 733fcc0..8fdf4ba 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/ams.py
@@ -187,6 +187,10 @@ def ams(name=None):
               recursive=True
     )
 
+    Execute(('chown', '-R', params.ams_user, params.ams_collector_conf_dir),
+            sudo=True
+            )
+
     Directory(params.ams_checkpoint_dir,
               owner=params.ams_user,
               group=params.user_group,
@@ -194,6 +198,10 @@ def ams(name=None):
               recursive=True
     )
 
+    Execute(('chown', '-R', params.ams_user, params.ams_checkpoint_dir),
+            sudo=True
+            )
+
     XmlConfig("ams-site.xml",
               conf_dir=params.ams_collector_conf_dir,
               configurations=params.config['configurations']['ams-site'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/82f449dd/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
index b33a62f..2ddb8fe 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
@@ -97,12 +97,20 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
       recursive = True
   )
 
+  Execute(('chown', '-R', params.hbase_user, params.hbase_conf_dir),
+          sudo=True
+          )
+
   Directory (params.hbase_tmp_dir,
              owner = params.hbase_user,
              cd_access="a",
              recursive = True
   )
 
+  Execute(('chown', '-R', params.hbase_user, params.hbase_tmp_dir),
+          sudo=True
+          )
+
   Directory (os.path.join(params.local_dir, "jars"),
              owner = params.hbase_user,
              group = params.user_group,
@@ -232,6 +240,10 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
                 recursive = True
       )
 
+      Execute(('chown', '-R', params.hbase_user, local_root_dir),
+              sudo=True
+              )
+
       File(format("{params.hbase_pid_dir}/distributed_mode"), action="delete", owner=params.hbase_user)
 
   if params.hbase_log4j_props is not None:

http://git-wip-us.apache.org/repos/asf/ambari/blob/82f449dd/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py.orig
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py.orig b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py.orig
new file mode 100644
index 0000000..b33a62f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py.orig
@@ -0,0 +1,257 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import os
+from ambari_commons import OSConst
+from resource_management import *
+from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def hbase(name=None, action = None):
+  import params
+  Directory(params.hbase_conf_dir,
+            owner = params.hadoop_user,
+            recursive = True
+  )
+  Directory(params.hbase_tmp_dir,
+             recursive = True,
+             owner = params.hadoop_user
+  )
+
+  Directory (os.path.join(params.local_dir, "jars"),
+             owner = params.hadoop_user,
+             recursive = True
+  )
+
+  XmlConfig("hbase-site.xml",
+            conf_dir = params.hbase_conf_dir,
+            configurations = params.config['configurations']['ams-hbase-site'],
+            configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'],
+            owner = params.hadoop_user
+  )
+
+  if 'ams-hbase-policy' in params.config['configurations']:
+    XmlConfig("hbase-policy.xml",
+              conf_dir = params.hbase_conf_dir,
+              configurations = params.config['configurations']['ams-hbase-policy'],
+              configuration_attributes=params.config['configuration_attributes']['ams-hbase-policy'],
+              owner = params.hadoop_user
+    )
+  # Manually overriding ownership of file installed by hadoop package
+  else:
+    File(os.path.join(params.hbase_conf_dir, "hbase-policy.xml"),
+          owner = params.hadoop_user
+    )
+
+  # Metrics properties
+  File(os.path.join(params.hbase_conf_dir, "hadoop-metrics2-hbase.properties"),
+       owner = params.hbase_user,
+       content=Template("hadoop-metrics2-hbase.properties.j2")
+  )
+
+  hbase_TemplateConfig('regionservers', user=params.hadoop_user)
+
+  if params.security_enabled:
+    hbase_TemplateConfig(format("hbase_{name}_jaas.conf"), user=params.hadoop_user)
+
+  if name != "client":
+    Directory (params.hbase_log_dir,
+               owner = params.hadoop_user,
+               recursive = True
+    )
+
+  if (params.hbase_log4j_props != None):
+    File(os.path.join(params.hbase_conf_dir, "log4j.properties"),
+         owner=params.hadoop_user,
+         content=params.hbase_log4j_props
+    )
+  elif (os.path.exists(os.path.join(params.hbase_conf_dir,"log4j.properties"))):
+    File(os.path.join(params.hbase_conf_dir,"log4j.properties"),
+         owner=params.hadoop_user
+    )
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def hbase(name=None # 'master' or 'regionserver' or 'client'
+          , action=None):
+  import params
+
+  Directory(params.hbase_conf_dir,
+      owner = params.hbase_user,
+      group = params.user_group,
+      recursive = True
+  )
+
+  Directory (params.hbase_tmp_dir,
+             owner = params.hbase_user,
+             cd_access="a",
+             recursive = True
+  )
+
+  Directory (os.path.join(params.local_dir, "jars"),
+             owner = params.hbase_user,
+             group = params.user_group,
+             cd_access="a",
+             mode=0775,
+             recursive = True
+  )
+
+  merged_ams_hbase_site = {}
+  merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-site'])
+  if params.security_enabled:
+    merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-security-site'])
+
+  XmlConfig("hbase-site.xml",
+            conf_dir = params.hbase_conf_dir,
+            configurations = merged_ams_hbase_site,
+            configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'],
+            owner = params.hbase_user,
+            group = params.user_group
+  )
+
+  # Phoenix spool file dir if not /tmp
+  if not os.path.exists(params.phoenix_server_spool_dir):
+    Directory(params.phoenix_server_spool_dir,
+              owner=params.ams_user,
+              mode = 0755,
+              group=params.user_group,
+              cd_access="a",
+              recursive=True
+    )
+  pass
+
+  if 'ams-hbase-policy' in params.config['configurations']:
+    XmlConfig("hbase-policy.xml",
+            conf_dir = params.hbase_conf_dir,
+            configurations = params.config['configurations']['ams-hbase-policy'],
+            configuration_attributes=params.config['configuration_attributes']['ams-hbase-policy'],
+            owner = params.hbase_user,
+            group = params.user_group
+    )
+  # Manually overriding ownership of file installed by hadoop package
+  else:
+    File( format("{params.hbase_conf_dir}/hbase-policy.xml"),
+      owner = params.hbase_user,
+      group = params.user_group
+    )
+
+  File(format("{hbase_conf_dir}/hbase-env.sh"),
+       owner = params.hbase_user,
+       content=InlineTemplate(params.hbase_env_sh_template)
+  )
+
+  # Metrics properties
+  File(os.path.join(params.hbase_conf_dir, "hadoop-metrics2-hbase.properties"),
+         owner = params.hbase_user,
+         group = params.user_group,
+         content=Template("hadoop-metrics2-hbase.properties.j2")
+    )
+
+  # hbase_TemplateConfig( params.metric_prop_file_name,
+  #   tag = 'GANGLIA-MASTER' if name == 'master' else 'GANGLIA-RS'
+  # )
+
+  hbase_TemplateConfig('regionservers', user=params.hbase_user)
+
+  if params.security_enabled:
+    hbase_TemplateConfig( format("hbase_{name}_jaas.conf"), user=params.hbase_user)
+    hbase_TemplateConfig( format("hbase_client_jaas.conf"), user=params.hbase_user)
+    hbase_TemplateConfig( format("ams_zookeeper_jaas.conf"), user=params.hbase_user)
+
+  if name != "client":
+    Directory( params.hbase_pid_dir,
+               owner = params.hbase_user,
+               recursive = True,
+               cd_access = "a",
+               mode = 0755,
+    )
+
+    Directory (params.hbase_log_dir,
+               owner = params.hbase_user,
+               recursive = True,
+               cd_access = "a",
+               mode = 0755,
+    )
+
+  if name == "master":
+
+    if not params.is_local_fs_rootdir:
+      # If executing Stop All, HDFS is probably down
+      if action != 'stop':
+
+        params.HdfsResource(params.hbase_root_dir,
+                             type="directory",
+                             action="create_on_execute",
+                             owner=params.hbase_user,
+                             mode=0775,
+                             dfs_type=params.dfs_type
+        )
+
+        params.HdfsResource(params.hbase_staging_dir,
+                             type="directory",
+                             action="create_on_execute",
+                             owner=params.hbase_user,
+                             mode=0711,
+                             dfs_type=params.dfs_type
+        )
+
+        params.HdfsResource(None, action="execute")
+
+      if params.is_hbase_distributed:
+        #Workaround for status commands not aware of operating mode
+        File(format("{params.hbase_pid_dir}/distributed_mode"), action="create", mode=0644, owner=params.hbase_user)
+
+      pass
+
+    else:
+
+      local_root_dir = params.hbase_root_dir
+      #cut protocol name
+      if local_root_dir.startswith("file://"):
+        local_root_dir = local_root_dir[7:]
+        #otherwise assume dir name is provided as is
+
+      Directory(local_root_dir,
+                owner = params.hbase_user,
+                cd_access="a",
+                recursive = True
+      )
+
+      File(format("{params.hbase_pid_dir}/distributed_mode"), action="delete", owner=params.hbase_user)
+
+  if params.hbase_log4j_props is not None:
+    File(format("{params.hbase_conf_dir}/log4j.properties"),
+         mode=0644,
+         group=params.user_group,
+         owner=params.hbase_user,
+         content=params.hbase_log4j_props
+    )
+  elif os.path.exists(format("{params.hbase_conf_dir}/log4j.properties")):
+    File(format("{params.hbase_conf_dir}/log4j.properties"),
+      mode=0644,
+      group=params.user_group,
+      owner=params.hbase_user
+    )
+
+def hbase_TemplateConfig(name, tag=None, user=None):
+  import params
+
+  TemplateConfig( os.path.join(params.hbase_conf_dir, name),
+      owner = user,
+      template_tag = tag
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/82f449dd/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
index 06729a7..bccf865 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
@@ -116,12 +116,20 @@ class TestMetricsCollector(RMFTestCase):
                               group = 'hadoop',
                               recursive = True
     )
+
+    self.assertResourceCalled('Execute', ('chown','-R', u'ams', '/etc/ambari-metrics-collector/conf'),
+                              sudo=True)
+
     self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/checkpoint',
                               owner = 'ams',
                               group = 'hadoop',
                               cd_access = 'a',
                               recursive = True
     )
+
+    self.assertResourceCalled('Execute', ('chown','-R', u'ams', '/var/lib/ambari-metrics-collector/checkpoint'),
+                              sudo=True)
+
     self.assertResourceCalled('XmlConfig', 'ams-site.xml',
                               owner = 'ams',
                               group = 'hadoop',
@@ -220,11 +228,19 @@ class TestMetricsCollector(RMFTestCase):
                               group = 'hadoop',
                               recursive = True
     )
+
+    self.assertResourceCalled('Execute', ('chown','-R', u'ams', '/etc/ams-hbase/conf'),
+                                sudo=True)
+
     self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/hbase-tmp',
                               owner = 'ams',
                               cd_access = 'a',
                               recursive = True
     )
+
+    self.assertResourceCalled('Execute', ('chown','-R', u'ams', '/var/lib/ambari-metrics-collector/hbase-tmp'),
+                            sudo=True)
+
     self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/hbase-tmp/local/jars',
                               owner = 'ams',
                               cd_access = 'a',
@@ -326,6 +342,12 @@ class TestMetricsCollector(RMFTestCase):
                                   cd_access="a",
                                   recursive = True
         )
+
+      if (not distributed):
+        self.assertResourceCalled('Execute', ('chown','-R','ams', '/var/lib/ambari-metrics-collector/hbase'),
+                                  sudo=True)
+
+
         self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//distributed_mode',
                                   owner = 'ams',
                                   action = ['delete']

http://git-wip-us.apache.org/repos/asf/ambari/blob/82f449dd/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py.orig
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py.orig b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py.orig
new file mode 100644
index 0000000..06729a7
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py.orig
@@ -0,0 +1,338 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+
+@patch("os.path.exists", new = MagicMock(return_value=True))
+@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
+class TestMetricsCollector(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "AMBARI_METRICS/0.1.0/package"
+  STACK_VERSION = "2.0.6"
+
+  def test_start_default_distributed(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/metrics_collector.py",
+                       classname = "AmsCollector",
+                       command = "start",
+                       config_file="default.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.maxDiff=None
+    self.assert_hbase_configure('master', distributed=True)
+    self.assert_hbase_configure('regionserver', distributed=True)
+    self.assert_ams('collector', distributed=True)
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf stop zookeeper',
+                              on_timeout = 'ls /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid`',
+                              timeout = 30,
+                              user = 'ams'
+    )
+    self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid',
+                              action = ['delete']
+    )
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf stop master',
+                              on_timeout = 'ls /var/run/ambari-metrics-collector//hbase-ams-master.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-master.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/ambari-metrics-collector//hbase-ams-master.pid`',
+                              timeout = 30,
+                              user = 'ams'
+    )
+    self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//hbase-ams-master.pid',
+                              action = ['delete']
+    )
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf stop regionserver',
+                              on_timeout = 'ls /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid` >/dev/null 2>&1 && kill -9 `cat /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid`',
+                              timeout = 30,
+                              user = 'ams'
+    )
+    self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//hbase-ams-regionserver.pid',
+                              action = ['delete']
+    )
+    self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-collector --config /etc/ambari-metrics-collector/conf --distributed stop',
+                              user = 'ams'
+    )
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf start zookeeper',
+                              not_if = 'ls /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-zookeeper.pid` >/dev/null 2>&1',
+                              user = 'ams'
+    )
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf start master',
+                              not_if = 'ls /var/run/ambari-metrics-collector//hbase-ams-master.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-master.pid` >/dev/null 2>&1',
+                              user = 'ams'
+    )
+    self.assertResourceCalled('Execute', '/usr/lib/ams-hbase/bin/hbase-daemon.sh --config /etc/ams-hbase/conf start regionserver',
+                              not_if = 'ls /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid >/dev/null 2>&1 && ps `cat /var/run/ambari-metrics-collector//hbase-ams-regionserver.pid` >/dev/null 2>&1',
+                              user = 'ams'
+    )
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh rm -rf /var/lib/ambari-metrics-collector/hbase-tmp/*.tmp',
+    )
+    self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-collector --config /etc/ambari-metrics-collector/conf --distributed start',
+                              user = 'ams'
+    )
+    self.assertNoMoreResources()
+
+  def test_start_default_embedded(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/metrics_collector.py",
+                       classname = "AmsCollector",
+                       command = "start",
+                       config_file="default_ams_embedded.json",
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.maxDiff=None
+    self.assert_hbase_configure('master')
+    self.assert_hbase_configure('regionserver')
+    self.assert_ams('collector')
+    self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-collector --config /etc/ambari-metrics-collector/conf stop',
+                              user = 'ams'
+    )
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh rm -rf /var/lib/ambari-metrics-collector/hbase-tmp/*.tmp',
+    )
+    self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/hbase-tmp/zookeeper',
+                              action = ['delete']
+    )
+    self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-collector --config /etc/ambari-metrics-collector/conf start',
+                              user = 'ams'
+    )
+    self.assertNoMoreResources()
+
+  def assert_ams(self, name=None, distributed=False):
+    self.assertResourceCalled('Directory', '/etc/ambari-metrics-collector/conf',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              recursive = True
+    )
+    self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/checkpoint',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              cd_access = 'a',
+                              recursive = True
+    )
+    self.assertResourceCalled('XmlConfig', 'ams-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ambari-metrics-collector/conf',
+                              configurations = self.getConfig()['configurations']['ams-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
+    )
+    merged_ams_hbase_site = {}
+    merged_ams_hbase_site.update(self.getConfig()['configurations']['ams-hbase-site'])
+    merged_ams_hbase_site['phoenix.query.maxGlobalMemoryPercentage'] = '25'
+    merged_ams_hbase_site['phoenix.spool.directory'] = '/tmp'
+
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ambari-metrics-collector/conf',
+                              configurations = merged_ams_hbase_site,
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
+    )
+    self.assertResourceCalled('File', '/etc/ambari-metrics-collector/conf/log4j.properties',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              content = "\n",
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/etc/ambari-metrics-collector/conf/ams-env.sh',
+                              owner = 'ams',
+                              content = InlineTemplate(self.getConfig()['configurations']['ams-env']['content'])
+    )
+    self.assertResourceCalled('Directory', '/var/log/ambari-metrics-collector',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              cd_access = 'a',
+                              recursive = True,
+                              mode = 0755,
+    )
+    self.assertResourceCalled('Directory', '/var/run/ambari-metrics-collector',
+                              owner = 'ams',
+                              cd_access = 'a',
+                              group = 'hadoop',
+                              recursive = True,
+                              mode=0755,
+    )
+    self.assertResourceCalled('File', '/usr/lib/ams-hbase/bin/hadoop',
+                              owner = 'ams',
+                              mode=0755
+    )
+    self.assertResourceCalled('Directory', '/etc/security/limits.d',
+                              owner = 'root',
+                              group = 'root',
+                              recursive = True
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/ams.conf',
+                              owner='root',
+                              group='root',
+                              mode=0644,
+                              content=Template("ams.conf.j2")
+    )
+    if distributed:
+      self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                                owner = 'ams',
+                                group = 'hadoop',
+                                mode=0644,
+                                conf_dir = '/etc/ambari-metrics-collector/conf',
+                                configurations = self.getConfig()['configurations']['hdfs-site'],
+                                configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+      )
+      self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                                owner = 'ams',
+                                group = 'hadoop',
+                                mode=0644,
+                                conf_dir = '/etc/ams-hbase/conf',
+                                configurations = self.getConfig()['configurations']['hdfs-site'],
+                                configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+      )
+      self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                                owner = 'ams',
+                                group = 'hadoop',
+                                mode=0644,
+                                conf_dir = '/etc/ambari-metrics-collector/conf',
+                                configurations = self.getConfig()['configurations']['core-site'],
+                                configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+      )
+      self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                                owner = 'ams',
+                                group = 'hadoop',
+                                mode=0644,
+                                conf_dir = '/etc/ams-hbase/conf',
+                                configurations = self.getConfig()['configurations']['core-site'],
+                                configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+      )
+
+  def assert_hbase_configure(self, name=None, distributed=False):
+    self.assertResourceCalled('Directory', '/etc/ams-hbase/conf',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              recursive = True
+    )
+    self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/hbase-tmp',
+                              owner = 'ams',
+                              cd_access = 'a',
+                              recursive = True
+    )
+    self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/hbase-tmp/local/jars',
+                              owner = 'ams',
+                              cd_access = 'a',
+                              group = 'hadoop',
+                              mode = 0775,
+                              recursive = True
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ams-hbase/conf',
+                              configurations = self.getConfig()['configurations']['ams-hbase-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
+                              )
+    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              conf_dir = '/etc/ams-hbase/conf',
+                              configurations = self.getConfig()['configurations']['ams-hbase-policy'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['ams-hbase-site']
+    )
+    self.assertResourceCalled('File', '/etc/ams-hbase/conf/hbase-env.sh',
+                              owner = 'ams',
+                              content = InlineTemplate(self.getConfig()['configurations']['ams-hbase-env']['content'])
+                              )
+    self.assertResourceCalled('File', '/etc/ams-hbase/conf/hadoop-metrics2-hbase.properties',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              content = Template('hadoop-metrics2-hbase.properties.j2')
+                              )
+    self.assertResourceCalled('TemplateConfig', '/etc/ams-hbase/conf/regionservers',
+                              owner = 'ams',
+                              template_tag = None,
+                              )
+    self.assertResourceCalled('Directory', '/var/run/ambari-metrics-collector/',
+                              owner = 'ams',
+                              recursive = True,
+                              mode = 0755,
+                              cd_access = "a",
+    )
+    self.assertResourceCalled('Directory', '/var/log/ambari-metrics-collector',
+                              owner = 'ams',
+                              recursive = True,
+                              mode = 0755,
+                              cd_access = "a",
+    )
+
+    if name == 'master':
+      if distributed:
+        self.assertResourceCalled('HdfsResource', 'hdfs://localhost:8020/apps/hbase/data',
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  user = 'hdfs',
+                                  dfs_type = '',
+                                  owner = 'ams',
+                                  mode = 0775,
+                                  hadoop_conf_dir = '/etc/hadoop/conf',
+                                  type = 'directory',
+                                  action = ['create_on_execute'],
+                                  hdfs_site=self.getConfig()['configurations']['hdfs-site'],
+                                  principal_name=UnknownConfigurationMock(),
+                                  default_fs='hdfs://c6401.ambari.apache.org:8020',
+                                  )
+        self.assertResourceCalled('HdfsResource', '/amshbase/staging',
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  user = 'hdfs',
+                                  dfs_type = '',
+                                  owner = 'ams',
+                                  mode = 0711,
+                                  hadoop_conf_dir = '/etc/hadoop/conf',
+                                  type = 'directory',
+                                  action = ['create_on_execute'],
+                                  hdfs_site=self.getConfig()['configurations']['hdfs-site'],
+                                  principal_name=UnknownConfigurationMock(),
+                                  default_fs='hdfs://c6401.ambari.apache.org:8020',
+                                  )
+        self.assertResourceCalled('HdfsResource', None,
+                                  security_enabled = False,
+                                  hadoop_bin_dir = '/usr/bin',
+                                  keytab = UnknownConfigurationMock(),
+                                  kinit_path_local = '/usr/bin/kinit',
+                                  user = 'hdfs',
+                                  hadoop_conf_dir = '/etc/hadoop/conf',
+                                  action = ['execute'],
+                                  hdfs_site=self.getConfig()['configurations']['hdfs-site'],
+                                  principal_name=UnknownConfigurationMock(),
+                                  default_fs='hdfs://c6401.ambari.apache.org:8020',
+                                  )
+        self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//distributed_mode', action=["create"],
+                                  mode=0644, owner='ams')
+      else:
+        self.assertResourceCalled('Directory', '/var/lib/ambari-metrics-collector/hbase',
+                                  owner = 'ams',
+                                  cd_access="a",
+                                  recursive = True
+        )
+        self.assertResourceCalled('File', '/var/run/ambari-metrics-collector//distributed_mode',
+                                  owner = 'ams',
+                                  action = ['delete']
+        )
+    self.assertResourceCalled('File', '/etc/ams-hbase/conf/log4j.properties',
+                              owner = 'ams',
+                              group = 'hadoop',
+                              mode = 0644,
+                              content = "\n"
+    )