Posted to commits@ambari.apache.org by al...@apache.org on 2015/04/16 00:55:40 UTC

[4/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)

http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_regionserver.py
new file mode 100644
index 0000000..153555e
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_regionserver.py
@@ -0,0 +1,601 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+from unittest import skip
+
+@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
+@patch("os.path.exists", new = MagicMock(return_value=True))
+class TestHbaseRegionServer(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "HBASE/1.1.0.2.3/package"
+  STACK_VERSION = "2.3"
+
+  def test_configure_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "configure",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+    
+  def test_start_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "start",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assert_configure_default()
+    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
+      not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
+      user = 'hbase'
+    )
+    self.assertNoMoreResources()
+    
+  def test_stop_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "stop",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver',
+        on_timeout = '! ( ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`',
+        timeout = 30,
+        user = 'hbase',
+    )
+    
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-regionserver.pid',
+    )
+    self.assertNoMoreResources()
+    
+  def test_configure_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "configure",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assert_configure_secured()
+    self.assertNoMoreResources()
+    
+  def test_start_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "start",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assert_configure_secured()
+    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
+      not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
+      user = 'hbase',
+    )
+    self.assertNoMoreResources()
+    
+  def test_stop_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "stop",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver',
+        on_timeout = '! ( ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`',
+        timeout = 30,
+        user = 'hbase',
+    )
+    
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-regionserver.pid',
+    )
+    self.assertNoMoreResources()
+
+  def assert_configure_default(self):
+    self.assertResourceCalled('Directory', '/etc/hbase',
+      mode = 0755
+    )
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+      owner = 'hbase',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      mode=0775,
+      recursive = True,
+      cd_access='a'
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
+      owner = 'hbase',
+      group = 'hadoop',
+      mode=0775,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
+      owner = 'hbase',
+      group = 'hadoop',
+      mode=0775,
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hbase-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hdfs-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['hdfs-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hbase-policy'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy']
+    )
+    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+      owner = 'hbase',
+      content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+      owner = 'hbase',
+      template_tag = 'GANGLIA-RS',
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+      owner = 'hbase',
+      template_tag = None,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
+    self.assertResourceCalled('File',
+                              '/etc/hbase/conf/log4j.properties',
+                              mode=0644,
+                              group='hadoop',
+                              owner='hbase',
+                              content='log4jproperties\nline2'
+    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6405.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'],
+                              )
+
+  def assert_configure_secured(self):
+    self.assertResourceCalled('Directory', '/etc/hbase',
+      mode = 0755
+    )
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+      owner = 'hbase',
+      group = 'hadoop',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      mode=0775,
+      recursive = True,
+      cd_access='a'
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
+      owner = 'hbase',
+      group = 'hadoop',
+      mode=0775,
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
+      owner = 'hbase',
+      group = 'hadoop',
+      mode=0775,
+      recursive = True,
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hbase-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['core-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hdfs-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      conf_dir = '/etc/hadoop/conf',
+      configurations = self.getConfig()['configurations']['hdfs-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hbase-policy'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy']
+    )
+    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+      owner = 'hbase',
+      content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+      owner = 'hbase',
+      template_tag = 'GANGLIA-RS',
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+      owner = 'hbase',
+      template_tag = None,
+    )
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_regionserver_jaas.conf',
+      owner = 'hbase',
+      template_tag = None,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
+    self.assertResourceCalled('Directory', '/var/log/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
+    self.assertResourceCalled('File',
+                              '/etc/hbase/conf/log4j.properties',
+                              mode=0644,
+                              group='hadoop',
+                              owner='hbase',
+                              content='log4jproperties\nline2'
+    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6405.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'],
+                              )
+
+  @skip("there's nothing to upgrade to yet")    
+  def test_start_default_24(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "start",
+                   config_file="hbase-rs-2.4.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES)
+    
+    self.assertResourceCalled('Directory', '/etc/hbase',
+      mode = 0755)
+
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+      owner = 'hbase',
+      group = 'hadoop',
+      recursive = True)
+
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      mode = 0775,
+      recursive = True,
+      cd_access='a')
+
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
+      owner = 'hbase',
+      group = 'hadoop',
+      mode=0775,
+      recursive = True)
+
+    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
+      owner = 'hbase',
+      group = 'hadoop',
+      mode=0775,
+      recursive = True)
+
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hbase-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site'])
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hdfs-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'])
+
+    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hadoop/conf',
+                              configurations = self.getConfig()['configurations']['hdfs-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'])
+
+    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hbase-policy'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy'])
+
+    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+      owner = 'hbase',
+      content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']))
+
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
+      owner = 'hbase',
+      template_tag = 'GANGLIA-RS')
+
+    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
+      owner = 'hbase',
+      template_tag = None)
+
+    self.assertResourceCalled('Directory', '/var/run/hbase',
+      owner = 'hbase',
+      recursive = True)
+
+    self.assertResourceCalled('Directory', '/var/log/hbase',
+      owner = 'hbase',
+      recursive = True)
+
+    self.assertResourceCalled('File',
+                              '/etc/hbase/conf/log4j.properties',
+                              mode=0644,
+                              group='hadoop',
+                              owner='hbase',
+                              content='log4jproperties\nline2')
+
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'])
+
+    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
+      not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
+      user = 'hbase')
+
+    self.assertNoMoreResources()
+
+  @patch("resource_management.libraries.functions.security_commons.build_expectations")
+  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
+  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
+  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
+  @patch("resource_management.libraries.script.Script.put_structured_out")
+  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
+    # Test that the function works when called with correct parameters
+
+    security_params = {
+      'hbase-site': {
+        'hbase.regionserver.keytab.file': '/path/to/hbase_keytab',
+        'hbase.regionserver.kerberos.principal': 'hbase_principal'
+      }
+    }
+
+    result_issues = []
+    props_value_check = {"hbase.security.authentication": "kerberos",
+                           "hbase.security.authorization": "true"}
+    props_empty_check = ["hbase.regionserver.keytab.file",
+                           "hbase.regionserver.kerberos.principal"]
+
+    props_read_check = ["hbase.regionserver.keytab.file"]
+
+    get_params_mock.return_value = security_params
+    validate_security_config_mock.return_value = result_issues
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "security_status",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    build_exp_mock.assert_called_with('hbase-site', props_value_check, props_empty_check, props_read_check)
+    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
+    cached_kinit_executor_mock.called_with('/usr/bin/kinit',
+                                           self.config_dict['configurations']['hbase-env']['hbase_user'],
+                                           security_params['hbase-site']['hbase.regionserver.keytab.file'],
+                                           security_params['hbase-site']['hbase.regionserver.kerberos.principal'],
+                                           self.config_dict['hostname'],
+                                           '/tmp')
+
+    # Testing that the exception thrown by cached_kinit_executor is caught
+    cached_kinit_executor_mock.reset_mock()
+    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
+
+    try:
+      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "security_status",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+      )
+    except:
+      self.assertTrue(True)
+
+    # Testing with a security_params which doesn't contain hbase-site
+    empty_security_params = {}
+    cached_kinit_executor_mock.reset_mock()
+    get_params_mock.reset_mock()
+    put_structured_out_mock.reset_mock()
+    get_params_mock.return_value = empty_security_params
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "security_status",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
+
+    # Testing with a non-empty result_issues
+    result_issues_with_params = {
+      'hbase-site' : "Something bad happened"
+    }
+
+    validate_security_config_mock.reset_mock()
+    get_params_mock.reset_mock()
+    validate_security_config_mock.return_value = result_issues_with_params
+    get_params_mock.return_value = security_params
+
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "security_status",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
+
+    # Testing with security_enabled = false
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
+                   classname = "HbaseRegionServer",
+                   command = "security_status",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})

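A side note on the mock usage in the security_status test above: cached_kinit_executor_mock.called_with(...) is not an assertion in the mock library. Accessing .called_with on a MagicMock simply creates and calls a child mock, so that line never fails regardless of the arguments; only assert_called_with verifies the recorded call. A minimal standalone sketch (not part of this commit) illustrating the difference:

from mock.mock import MagicMock

executor = MagicMock()
executor('/usr/bin/kinit', 'hbase')          # record a call on the mock

# No-op: this only creates and invokes a child mock named 'called_with',
# so it "passes" even though the arguments do not match the recorded call.
executor.called_with('/usr/bin/kinit', 'wrong-user')

# Real verification: raises AssertionError if the recorded call differs.
executor.assert_called_with('/usr/bin/kinit', 'hbase')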
http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_service_check.py b/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_service_check.py
new file mode 100644
index 0000000..ca6a0d8
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_service_check.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, call, patch
+from stacks.utils.RMFTestCase import *
+from unittest import skip
+import datetime
+import resource_management.libraries.functions
+
+@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
+@patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
+class TestServiceCheck(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "HBASE/1.1.0.2.3/package"
+  STACK_VERSION = "2.3"
+
+  def test_service_check_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
+                        classname="HbaseServiceCheck",
+                        command="service_check",
+                        config_file="hbase_default.json",
+                        hdp_stack_version = self.STACK_VERSION,
+                        target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assertResourceCalled('File', '/tmp/hbaseSmokeVerify.sh',
+      content = StaticFile('hbaseSmokeVerify.sh'),
+      mode = 0755,
+    )
+    self.assertResourceCalled('File', '/tmp/hbase-smoke.sh',
+      content = Template('hbase-smoke.sh.j2'),
+      mode = 0755,
+    )
+    self.assertResourceCalled('Execute', ' /usr/hdp/current/hbase-regionserver/bin/hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
+      logoutput = True,
+      tries = 3,
+      user = 'ambari-qa',
+      try_sleep = 5,
+    )
+    self.assertResourceCalled('Execute', ' /tmp/hbaseSmokeVerify.sh /etc/hbase/conf  /usr/hdp/current/hbase-regionserver/bin/hbase',
+      logoutput = True,
+      tries = 3,
+      user = 'ambari-qa',
+      try_sleep = 5,
+    )
+    self.assertNoMoreResources()
+    
+    
+  def test_service_check_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
+                        classname="HbaseServiceCheck",
+                        command="service_check",
+                        config_file="hbase_secure.json",
+                        hdp_stack_version = self.STACK_VERSION,
+                        target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assertResourceCalled('File', '/tmp/hbaseSmokeVerify.sh',
+      content = StaticFile('hbaseSmokeVerify.sh'),
+      mode = 0755,
+    )
+    self.assertResourceCalled('File', '/tmp/hbase-smoke.sh',
+      content = Template('hbase-smoke.sh.j2'),
+      mode = 0755,
+    )
+    self.assertResourceCalled('File', '/tmp/hbase_grant_permissions.sh',
+      content = Template('hbase_grant_permissions.j2'),
+      owner = 'hbase',
+      group = 'hadoop',
+      mode = 0644,
+    )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.headless.keytab hbase@EXAMPLE.COM; /usr/hdp/current/hbase-regionserver/bin/hbase shell /tmp/hbase_grant_permissions.sh',
+      user = 'hbase',
+    )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; /usr/hdp/current/hbase-regionserver/bin/hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
+      logoutput = True,
+      tries = 3,
+      try_sleep = 5,
+      user = 'ambari-qa'
+    )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM; /tmp/hbaseSmokeVerify.sh /etc/hbase/conf  /usr/hdp/current/hbase-regionserver/bin/hbase',
+      logoutput = True,
+      tries = 3,
+      try_sleep = 5,
+      user = 'ambari-qa'
+    )
+    self.assertNoMoreResources()
+
+  @skip("there's nothing to upgrade to yet")    
+  def test_service_check_24(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
+                        classname="HbaseServiceCheck",
+                        command="service_check",
+                        config_file="hbase-check-2.4.json",
+                        hdp_stack_version = self.STACK_VERSION,
+                        target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assertResourceCalled('File', '/tmp/hbaseSmokeVerify.sh',
+      content = StaticFile('hbaseSmokeVerify.sh'),
+      mode = 0755,
+    )
+    self.assertResourceCalled('File', '/tmp/hbase-smoke.sh',
+      content = Template('hbase-smoke.sh.j2'),
+      mode = 0755,
+    )
+    self.assertResourceCalled('Execute', ' /usr/hdp/current/hbase-client/bin/hbase --config /etc/hbase/conf shell /tmp/hbase-smoke.sh',
+      logoutput = True,
+      tries = 3,
+      user = 'ambari-qa',
+      try_sleep = 5,
+    )
+    self.assertResourceCalled('Execute', ' /tmp/hbaseSmokeVerify.sh /etc/hbase/conf  /usr/hdp/current/hbase-client/bin/hbase',
+      logoutput = True,
+      tries = 3,
+      user = 'ambari-qa',
+      try_sleep = 5,
+    )
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/test/python/stacks/2.3/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.3/HBASE/test_phoenix_queryserver.py
new file mode 100644
index 0000000..32a703d
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/HBASE/test_phoenix_queryserver.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+from unittest import skip
+
+@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
+@patch("os.path.exists", new = MagicMock(return_value=True))
+class TestPhoenixQueryServer(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "HBASE/1.1.0.2.3/package"
+  STACK_VERSION = "2.3"
+
+  def test_configure_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "configure",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assertNoMoreResources()
+    
+  def test_start_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "start",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+  def test_stop_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "stop",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
+        on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/phoenix-hbase-server.pid`',
+        timeout = 30,
+    )
+    
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/phoenix-hbase-server.pid',
+    )
+    self.assertNoMoreResources()
+    
+  def test_configure_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "configure",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assertNoMoreResources()
+    
+  def test_start_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "start",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+  def test_stop_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "stop",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
+        on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/phoenix-hbase-server.pid`',
+        timeout = 30,
+    )
+    
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/phoenix-hbase-server.pid',
+    )
+    self.assertNoMoreResources()
+
+  @skip("there's nothing to upgrade to yet")    
+  def test_start_default_24(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "start",
+                   config_file="hbase-rs-2.4.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES)
+    
+    self.assertResourceCalled('Directory', '/etc/hbase',
+      mode = 0755)
+
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+      owner = 'hbase',
+      group = 'hadoop',
+      recursive = True)
+
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hbase-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site'])
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+      owner = 'hbase',
+      content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']))
+
+    self.assertResourceCalled('Directory', '/var/run/hbase',
+      owner = 'hbase',
+      recursive = True)
+
+    self.assertResourceCalled('Directory', '/var/log/hbase',
+      owner = 'hbase',
+      recursive = True)
+
+    self.assertResourceCalled('File',
+                              '/usr/lib/phoenix/bin/log4j.properties',
+                              mode=0644,
+                              group='hadoop',
+                              owner='hbase',
+                              content='log4jproperties\nline2')
+
+
+    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py start',
+      not_if = 'ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1',
+      user = 'hbase')
+
+    self.assertNoMoreResources()