You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2015/04/21 15:28:56 UTC

[2/5] ambari git commit: AMBARI-10626. Phoenix component is not visible in 2.3 stack (aonishuk)

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
deleted file mode 100644
index 462bef4..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
+++ /dev/null
@@ -1,104 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# See http://wiki.apache.org/hadoop/GangliaMetrics
-#
-# Make sure you know whether you are using ganglia 3.0 or 3.1.
-# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
-# And, yes, this file is named hadoop-metrics.properties rather than
-# hbase-metrics.properties because we're leveraging the hadoop metrics
-# package and hadoop-metrics.properties is an hardcoded-name, at least
-# for the moment.
-#
-# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
-
-# HBase-specific configuration to reset long-running stats (e.g. compactions)
-# If this variable is left out, then the default is no expiration.
-hbase.extendedperiod = 3600
-
-{% if has_metric_collector %}
-
-*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
-*.sink.timeline.slave.host.name={{hostname}}
-hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-hbase.period=10
-hbase.collector={{metric_collector_host}}:{{metric_collector_port}}
-
-jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-jvm.period=10
-jvm.collector={{metric_collector_host}}:{{metric_collector_port}}
-
-rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-rpc.period=10
-rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
-
-hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-hbase.sink.timeline.period=10
-hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-
-{% else %}
-
-# Configuration of the "hbase" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-hbase.period=10
-hbase.servers={{ganglia_server_host}}:8656
-
-# Configuration of the "jvm" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-jvm.period=10
-jvm.servers={{ganglia_server_host}}:8656
-
-# Configuration of the "rpc" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-rpc.period=10
-rpc.servers={{ganglia_server_host}}:8656
-
-#Ganglia following hadoop example
-hbase.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
-hbase.sink.ganglia.period=10
-
-# default for supportsparse is false
-*.sink.ganglia.supportsparse=true
-
-.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
-.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
-
-hbase.sink.ganglia.servers={{ganglia_server_host}}:8656
-
-{% endif %}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase-smoke.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase-smoke.sh.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase-smoke.sh.j2
deleted file mode 100644
index 458da95..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase-smoke.sh.j2
+++ /dev/null
@@ -1,44 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-disable 'ambarismoketest'
-drop 'ambarismoketest'
-create 'ambarismoketest','family'
-put 'ambarismoketest','row01','family:col01','{{service_check_data}}'
-scan 'ambarismoketest'
-exit
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_client_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_client_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_client_jaas.conf.j2
deleted file mode 100644
index 38f9721..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_client_jaas.conf.j2
+++ /dev/null
@@ -1,23 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-Client {
-com.sun.security.auth.module.Krb5LoginModule required
-useKeyTab=false
-useTicketCache=true;
-};

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_grant_permissions.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_grant_permissions.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_grant_permissions.j2
deleted file mode 100644
index 3378983..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_grant_permissions.j2
+++ /dev/null
@@ -1,39 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-grant '{{smoke_test_user}}', '{{smokeuser_permissions}}'
-exit
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_master_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_master_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_master_jaas.conf.j2
deleted file mode 100644
index a93c36c..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_master_jaas.conf.j2
+++ /dev/null
@@ -1,26 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-Client {
-com.sun.security.auth.module.Krb5LoginModule required
-useKeyTab=true
-storeKey=true
-useTicketCache=false
-keyTab="{{master_keytab_path}}"
-principal="{{master_jaas_princ}}";
-};

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_regionserver_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_regionserver_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_regionserver_jaas.conf.j2
deleted file mode 100644
index 7097481..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hbase_regionserver_jaas.conf.j2
+++ /dev/null
@@ -1,26 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-Client {
-com.sun.security.auth.module.Krb5LoginModule required
-useKeyTab=true
-storeKey=true
-useTicketCache=false
-keyTab="{{regionserver_keytab_path}}"
-principal="{{regionserver_jaas_princ}}";
-};

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/regionservers.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/regionservers.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/regionservers.j2
deleted file mode 100644
index fc6cc37..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/regionservers.j2
+++ /dev/null
@@ -1,20 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-{% for host in rs_hosts %}{{host}}
-{% endfor %}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/widgets.json b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/widgets.json
deleted file mode 100644
index 1212133..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/widgets.json
+++ /dev/null
@@ -1,434 +0,0 @@
-{
-  "layouts": [
-    {
-      "layout_name": "default_hbase_dashboard",
-      "display_name": "Standard HBase Dashboard",
-      "section_name": "HBASE_SUMMARY",
-      "widgetLayoutInfo": [
-        {
-          "widget_name": "RS_READS_WRITES",
-          "display_name": "RegionServer Reads and Writes",
-          "description": "This widget shows all the read requests and write requests on all regions for a RegionServer",
-          "widget_type": "GRAPH",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "regionserver.Server.Get_num_ops",
-              "metric_path": "metrics/hbase/regionserver/Server/Get_num_ops",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.ScanNext_num_ops",
-              "metric_path": "metrics/hbase/regionserver/Server/ScanNext_num_ops",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.Append_num_ops",
-              "metric_path": "metrics/hbase/regionserver/Server/Append_num_ops",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.Delete_num_ops",
-              "metric_path": "metrics/hbase/regionserver/Server/Delete_num_ops",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.Increment_num_ops",
-              "metric_path": "metrics/hbase/regionserver/Server/Increment_num_ops",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.Mutate_num_ops",
-              "metric_path": "metrics/hbase/regionserver/Server/Mutate_num_ops",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "Read Requests",
-              "value": "${regionserver.Server.Get_num_ops + regionserver.Server.ScanNext_num_ops}"
-            },
-            {
-              "name": "Write Requests",
-              "value": "${regionserver.Server.Append_num_ops + regionserver.Server.Delete_num_ops + regionserver.Server.Increment_num_ops + regionserver.Server.Mutate_num_ops}"
-            }
-          ],
-          "properties": {
-            "display_unit": "Requests",
-            "graph_type": "LINE",
-            "time_range": "86400000"
-          }
-        },
-        {
-          "widget_name": "READ_LATENCY_95",
-          "display_name": "95% Read Latency",
-          "description": "This widget shows 95th percentile of the read latency.",
-          "widget_type": "NUMBER",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "regionserver.Server.Get_95th_percentile",
-              "metric_path": "metrics/hbase/regionserver/Server/Get_95th_percentile",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.ScanNext_95th_percentile",
-              "metric_path": "metrics/hbase/regionserver/Server/ScanNext_95th_percentile",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "95% Read Latency",
-              "value": "${regionserver.Server.Get_95th_percentile + regionserver.Server.ScanNext_95th_percentile}"
-            }
-          ],
-          "properties": {
-            "display_unit": ""
-          }
-        },
-        {
-          "widget_name": "WRITE_LATENCY_95",
-          "display_name": "95% Write Latency",
-          "description": "This widget shows 95th percentile of the write latency.",
-          "widget_type": "NUMBER",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "regionserver.Server.Mutate_95th_percentile",
-              "metric_path": "metrics/hbase/regionserver/Server/Mutate_95th_percentile",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.Increment_95th_percentile",
-              "metric_path": "metrics/hbase/regionserver/Server/Increment_95th_percentile",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.Append_95th_percentile",
-              "metric_path": "metrics/hbase/regionserver/Server/Append_95th_percentile",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "regionserver.Server.Delete_95th_percentile",
-              "metric_path": "metrics/hbase/regionserver/Server/Delete_95th_percentile",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "95% Write Latency",
-              "value": "${regionserver.Server.Mutate_95th_percentile + regionserver.Server.Increment_95th_percentile + regionserver.Server.Append_95th_percentile + regionserver.Server.Delete_95th_percentile}"
-            }
-          ],
-          "properties": {
-            "display_unit": ""
-          }
-        },
-        {
-          "widget_name": "OPEN_CONNECTIONS",
-          "display_name": "Open Connections",
-          "description": "This widget shows number of current open connections",
-          "widget_type": "GRAPH",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "ipc.IPC.numOpenConnections",
-              "metric_path": "metrics/hbase/ipc/IPC/numOpenConnections",
-              "category": "",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "Open Connections",
-              "value": "${ipc.IPC.numOpenConnections}"
-            }
-          ],
-          "properties": {
-            "display_unit": "Connections",
-            "graph_type": "LINE",
-            "time_range": "86400000"
-          }
-        },
-        {
-          "widget_name": "ACTIVE_HANDLER",
-          "display_name": "Active Handlers vs Calls in General Queue",
-          "widget_type": "GRAPH",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "ipc.IPC.numActiveHandler",
-              "metric_path": "metrics/hbase/ipc/IPC/numActiveHandler",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "ipc.IPC.numCallsInGeneralQueue",
-              "metric_path": "metrics/hbase/ipc/IPC/numCallsInGeneralQueue",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "Active Handlers",
-              "value": "${ipc.IPC.numActiveHandler}"
-            },
-            {
-              "name": "Calls in General Queue",
-              "value": "${ipc.IPC.numCallsInGeneralQueue}"
-            }
-          ],
-          "properties": {
-            "graph_type": "LINE",
-            "time_range": "86400000"
-          }
-        },
-        {
-          "widget_name": "FILES_LOCAL",
-          "display_name": "Files Local",
-          "description": "This widget shows percentage of files local.",
-          "widget_type": "NUMBER",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "regionserver.Server.percentFilesLocal",
-              "metric_path": "metrics/hbase/regionserver/percentFilesLocal",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "Files Local",
-              "value": "${regionserver.Server.percentFilesLocal}"
-            }
-          ],
-          "properties": {
-            "display_unit": "%"
-          }
-        },
-        {
-          "widget_name": "UPDATES_BLOCKED_TIME",
-          "display_name": "Updates Blocked Time",
-          "description": "Number of milliseconds updates have been blocked so the memstore can be flushed",
-          "widget_type": "GRAPH",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "regionserver.Server.updatesBlockedTime",
-              "metric_path": "metrics/hbase/regionserver/Server/updatesBlockedTime",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "Updates Blocked Time",
-              "value": "${regionserver.Server.updatesBlockedTime}"
-            }
-          ],
-          "properties": {
-            "display_unit": "ms",
-            "graph_type": "LINE",
-            "time_range": "86400000"
-          }
-        },
-        {
-          "widget_name": "RS_HOST_METRICS",
-          "display_name": "RegionServer System Metrics",
-          "description": "RegionServer widget for Host CPU, Network and Disk Utilization",
-          "widget_type": "GRAPH",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "cpu_idle",
-              "metric_path": "metrics/cpu/cpu_idle",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "disk_free",
-              "metric_path": "metrics/disk/disk_free",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "pkts_in",
-              "metric_path": "metrics/network/pkts_in",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            },
-            {
-              "name": "pkts_out",
-              "metric_path": "metrics/network/pkts_out",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "CPU Idle",
-              "value": "${cpu_idle}"
-            },
-            {
-              "name": "Disk Free",
-              "value": "${disk_free}"
-            },
-            {
-              "name": "Network Packets In/Out",
-              "value": "${pkts_in + pkts_out}"
-            }
-          ],
-          "properties": {
-            "graph_type": "LINE",
-            "time_range": "86400000"
-          }
-        }
-      ]
-    },
-    {
-      "layout_name": "default_hbase_heatmap",
-      "display_name": "HBase Heatmaps",
-      "section_name": "HBASE_HEATMAPS",
-      "widgetLayoutInfo": [
-        {
-          "widget_name": "HBASE_COMPACTION_QUEUE_SIZE",
-          "display_name": "HBase Compaction Queue Size",
-          "description": "",
-          "widget_type": "HEATMAP",
-          "is_visible": true,
-          "metrics": [
-            {
-              "name": "Hadoop:service=HBase,name=RegionServer,sub=Server.compactionQueueLength",
-              "metric_path": "metrics/hbase/regionserver/compactionQueueSize",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "HBase Compaction Queue Size",
-              "value": "${Hadoop:service=HBase,name=RegionServer,sub=Server.compactionQueueLength} "
-            }
-          ],
-          "properties": {
-            "display_unit": "",
-            "max_limit": "10"
-          }
-        },
-        {
-          "widget_name": "HBASE_MEMSTORE_SIZES",
-          "display_name": "HBase Memstore Sizes",
-          "description": "",
-          "widget_type": "HEATMAP",
-          "is_visible": false,
-          "metrics": [
-            {
-              "name": "Hadoop:service=HBase,name=RegionServer,sub=Server.memStoreSize",
-              "metric_path": "metrics/hbase/regionserver/memstoreSize",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "HBase Memstore Sizes",
-              "value": "${Hadoop:service=HBase,name=RegionServer,sub=Server.memStoreSize}"
-            }
-          ],
-          "properties": {
-            "display_unit": "B",
-            "max_limit": "104857600"
-          }
-        },
-        {
-          "widget_name": "HBASE_READ_REQUEST",
-          "display_name": "HBase Read Request Count",
-          "description": "",
-          "widget_type": "HEATMAP",
-          "is_visible": false,
-          "metrics": [
-            {
-              "name": "Hadoop:service=HBase,name=RegionServer,sub=Server.readRequestCount",
-              "metric_path": "metrics/hbase/regionserver/readRequestsCount",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "HBase Read Request Count",
-              "value": "${Hadoop:service=HBase,name=RegionServer,sub=Server.readRequestCount}"
-            }
-          ],
-          "properties": {
-            "max_limit": "200"
-          }
-        },
-        {
-          "widget_name": "HBASE_WRITE_REQUEST",
-          "display_name": "HBase Write Request Count",
-          "description": "",
-          "widget_type": "HEATMAP",
-          "is_visible": false,
-          "metrics": [
-            {
-              "name": "Hadoop:service=HBase,name=RegionServer,sub=Server.writeRequestCount",
-              "metric_path": "metrics/hbase/regionserver/writeRequestsCount",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "HBase Write Request Count",
-              "value": "${Hadoop:service=HBase,name=RegionServer,sub=Server.writeRequestCount}"
-            }
-          ],
-          "properties": {
-            "max_limit": "200"
-          }
-        },
-        {
-          "widget_name": "HBASE_REGIONS",
-          "display_name": "HBase Regions",
-          "description": "",
-          "widget_type": "HEATMAP",
-          "is_visible": false,
-          "metrics": [
-            {
-              "name": "Hadoop:service=HBase,name=RegionServer,sub=Server.regionCount",
-              "metric_path": "metrics/hbase/regionserver/regions",
-              "service_name": "HBASE",
-              "component_name": "HBASE_REGIONSERVER"
-            }
-          ],
-          "values": [
-            {
-              "name": "HBase Regions",
-              "value": "${Hadoop:service=HBase,name=RegionServer,sub=Server.regionCount}"
-            }
-          ],
-          "properties": {
-            "max_limit": "10"
-          }
-        }
-      ]
-    }
-  ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml
index a254e9d..921a225 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/metainfo.xml
@@ -21,6 +21,20 @@
     <service>
       <name>HBASE</name>
       <version>1.1.0.2.3</version>
+      <comment>A Non-relational distributed database, plus Phoenix, a high performance SQL layer for low latency applications.</comment>
+      <components>   
+        <component>
+          <name>PHOENIX_QUERY_SERVER</name>
+          <displayName>Phoenix Query Server</displayName>
+          <category>SLAVE</category>
+          <cardinality>0+</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <commandScript>
+            <script>scripts/phoenix_queryserver.py</script>
+            <scriptType>PYTHON</scriptType>
+          </commandScript>
+        </component>
+      </components>
 
       <osSpecifics>
         <osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
new file mode 100644
index 0000000..d3edd9f
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from mock.mock import MagicMock, patch
+from stacks.utils.RMFTestCase import *
+from unittest import skip
+
+@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
+@patch("os.path.exists", new = MagicMock(return_value=True))
+class TestPhoenixQueryServer(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "HBASE/0.96.0.2.0/package"
+  STACK_VERSION = "2.3"
+
+  def test_configure_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "configure",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assertNoMoreResources()
+    
+  def test_start_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "start",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+  def test_stop_default(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "stop",
+                   config_file="hbase_default.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
+        on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/phoenix-hbase-server.pid`',
+        timeout = 30,
+    )
+    
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/phoenix-hbase-server.pid',
+    )
+    self.assertNoMoreResources()
+    
+  def test_configure_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "configure",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    
+    self.assertNoMoreResources()
+    
+  def test_start_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "start",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+  def test_stop_secured(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "stop",
+                   config_file="hbase_secure.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES
+    )
+
+    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py stop',
+        on_timeout = '! ( ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/phoenix-hbase-server.pid`',
+        timeout = 30,
+    )
+    
+    self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/phoenix-hbase-server.pid',
+    )
+    self.assertNoMoreResources()
+
+  @skip("there's nothing to upgrade to yet")    
+  def test_start_default_24(self):
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
+                   classname = "PhoenixQueryServer",
+                   command = "start",
+                   config_file="hbase-rs-2.4.json",
+                   hdp_stack_version = self.STACK_VERSION,
+                   target = RMFTestCase.TARGET_COMMON_SERVICES)
+    
+    self.assertResourceCalled('Directory', '/etc/hbase',
+      mode = 0755)
+
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+      owner = 'hbase',
+      group = 'hadoop',
+      recursive = True)
+
+    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
+      owner = 'hbase',
+      group = 'hadoop',
+      conf_dir = '/etc/hbase/conf',
+      configurations = self.getConfig()['configurations']['hbase-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site'])
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+                              owner = 'hbase',
+                              group = 'hadoop',
+                              conf_dir = '/etc/hbase/conf',
+                              configurations = self.getConfig()['configurations']['core-site'],
+                              configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
+    )
+    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
+      owner = 'hbase',
+      content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']))
+
+    self.assertResourceCalled('Directory', '/var/run/hbase',
+      owner = 'hbase',
+      recursive = True)
+
+    self.assertResourceCalled('Directory', '/var/log/hbase',
+      owner = 'hbase',
+      recursive = True)
+
+    self.assertResourceCalled('File',
+                              '/usr/lib/phoenix/bin/log4j.properties',
+                              mode=0644,
+                              group='hadoop',
+                              owner='hbase',
+                              content='log4jproperties\nline2')
+
+
+    self.assertResourceCalled('Execute', '/usr/hdp/current/phoenix-server/bin/queryserver.py start',
+      not_if = 'ls /var/run/hbase/phoenix-hbase-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/phoenix-hbase-server.pid` >/dev/null 2>&1',
+      user = 'hbase')
+
+    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_client.py b/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_client.py
deleted file mode 100644
index dd58ea2..0000000
--- a/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_client.py
+++ /dev/null
@@ -1,213 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, call, patch
-from stacks.utils.RMFTestCase import *
-from unittest import skip
-
-@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
-@patch("os.path.exists", new = MagicMock(return_value=True))
-class TestHBaseClient(RMFTestCase):
-  COMMON_SERVICES_PACKAGE_DIR = "HBASE/1.1.0.2.3/package"
-  STACK_VERSION = "2.3"
-
-  def test_configure_secured(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_client.py",
-                   classname = "HbaseClient",
-                   command = "configure",
-                   config_file="hbase_secure.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-
-    self.assertResourceCalled('Directory', '/etc/hbase',
-      mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-      owner = 'hbase',
-      group = 'hadoop',
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      mode=0775,
-      recursive = True,
-      cd_access='a'
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['core-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-policy'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy']
-    )
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
-        content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
-        owner = 'hbase',
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
-      owner = 'hbase',
-      template_tag = 'GANGLIA-RS',
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
-      owner = 'hbase',
-      template_tag = None,
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_client_jaas.conf',
-      owner = 'hbase',
-      template_tag = None,
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hbase',
-                              content='log4jproperties\nline2'
-    )
-    self.assertNoMoreResources()
-    
-  def test_configure_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_client.py",
-                   classname = "HbaseClient",
-                   command = "configure",
-                   config_file="hbase_default.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    self.assertResourceCalled('Directory', '/etc/hbase',
-      mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-      owner = 'hbase',
-      group = 'hadoop',
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      mode=0775,
-      recursive = True,
-      cd_access='a'
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['core-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-policy'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy']
-    )
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
-        content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
-        owner = 'hbase',
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
-      owner = 'hbase',
-      template_tag = 'GANGLIA-RS',
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
-      owner = 'hbase',
-      template_tag = None,
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hbase',
-                              content='log4jproperties\nline2'
-    )
-    self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_master.py
deleted file mode 100644
index f4a8d6a..0000000
--- a/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_master.py
+++ /dev/null
@@ -1,678 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-from mock.mock import MagicMock, patch
-from stacks.utils.RMFTestCase import *
-from unittest import skip
-
-@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
-@patch("os.path.exists", new = MagicMock(return_value=True))
-class TestHBaseMaster(RMFTestCase):
-  COMMON_SERVICES_PACKAGE_DIR = "HBASE/1.1.0.2.3/package"
-  STACK_VERSION = "2.3"
-
-  def test_configure_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "configure",
-                   config_file="hbase_default.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    
-    self.assert_configure_default()
-    self.assertNoMoreResources()
-
-  def test_start_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "start",
-                   config_file="hbase_default.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    
-    self.assert_configure_default()
-    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
-      not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
-      user = 'hbase'
-    )
-    self.assertNoMoreResources()
-    
-  def test_stop_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "stop",
-                   config_file="hbase_default.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    
-    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf stop master',
-        on_timeout = '! ( ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/hbase-hbase-master.pid`',
-        timeout = 30,
-        user = 'hbase',
-    )
-    
-    self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-master.pid',
-    )
-    self.assertNoMoreResources()
-
-  def test_decom_default(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                       classname = "HbaseMaster",
-                       command = "decommission",
-                       config_file="hbase_default.json",
-                       hdp_stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-
-    self.assertResourceCalled('File', '/usr/hdp/current/hbase-regionserver/bin/draining_servers.rb',
-                              content = StaticFile('draining_servers.rb'),
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('Execute', ' /usr/hdp/current/hbase-regionserver/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/hdp/current/hbase-regionserver/bin/draining_servers.rb add host1',
-                              logoutput = True,
-                              user = 'hbase',
-                              )
-    self.assertResourceCalled('Execute', ' /usr/hdp/current/hbase-regionserver/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/hdp/current/hbase-regionserver/bin/region_mover.rb unload host1',
-                              logoutput = True,
-                              user = 'hbase',
-                              )
-    self.assertNoMoreResources()
-
-  def test_decom_default_draining_only(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                       classname = "HbaseMaster",
-                       command = "decommission",
-                       config_file="default.hbasedecom.json",
-                       hdp_stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-
-    self.assertResourceCalled('File', '/usr/lib/hbase/bin/draining_servers.rb',
-                              content = StaticFile('draining_servers.rb'),
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('Execute', ' /usr/lib/hbase/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/lib/hbase/bin/draining_servers.rb remove host1',
-                              logoutput = True,
-                              user = 'hbase',
-                              )
-    self.assertNoMoreResources()
-
-  def test_configure_secured(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "configure",
-                   config_file="hbase_secure.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    
-    self.assert_configure_secured()
-    self.assertNoMoreResources()
-    
-  def test_start_secured(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "start",
-                   config_file="hbase_secure.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    
-    self.assert_configure_secured()
-    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
-      not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
-      user = 'hbase',
-    )
-    self.assertNoMoreResources()
-    
-  def test_stop_secured(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "stop",
-                   config_file="hbase_secure.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-
-    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf stop master',
-        on_timeout = '! ( ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/hbase-hbase-master.pid`',
-        timeout = 30,
-        user = 'hbase',
-    )
-
-    self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-master.pid',
-    )
-    self.assertNoMoreResources()
-
-  def test_decom_secure(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                       classname = "HbaseMaster",
-                       command = "decommission",
-                       config_file="hbase_secure.json",
-                       hdp_stack_version = self.STACK_VERSION,
-                       target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-
-    self.assertResourceCalled('File', '/usr/hdp/current/hbase-regionserver/bin/draining_servers.rb',
-                              content = StaticFile('draining_servers.rb'),
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.headless.keytab hbase@EXAMPLE.COM; /usr/hdp/current/hbase-regionserver/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/hdp/current/hbase-regionserver/bin/draining_servers.rb add host1',
-                              logoutput = True,
-                              user = 'hbase',
-                              )
-    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hbase.headless.keytab hbase@EXAMPLE.COM; /usr/hdp/current/hbase-regionserver/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/hdp/current/hbase-regionserver/bin/region_mover.rb unload host1',
-                              logoutput = True,
-                              user = 'hbase',
-                              )
-    self.assertNoMoreResources()
-
-  def assert_configure_default(self):
-    self.assertResourceCalled('Directory', '/etc/hbase',
-      mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-      owner = 'hbase',
-      group = 'hadoop',
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      mode = 0775,
-      recursive = True,
-      cd_access='a'
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['core-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['hdfs-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-policy'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy']
-    )
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
-      owner = 'hbase',
-      content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
-      owner = 'hbase',
-      template_tag = 'GANGLIA-MASTER',
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
-      owner = 'hbase',
-      template_tag = None,
-    )
-    self.assertResourceCalled('Directory', '/var/run/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/var/log/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hbase',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6405.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create'],
-                              )
-
-  def assert_configure_secured(self):
-    self.assertResourceCalled('Directory', '/etc/hbase',
-      mode = 0755
-    )
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-      owner = 'hbase',
-      group = 'hadoop',
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      mode = 0775,
-      recursive = True,
-      cd_access='a'
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True
-    )
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True,
-    )
-    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['core-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['core-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hdfs',
-      group = 'hadoop',
-      conf_dir = '/etc/hadoop/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
-    )
-    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-policy'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy']
-    )
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
-      owner = 'hbase',
-      content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']),
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
-      owner = 'hbase',
-      template_tag = 'GANGLIA-MASTER',
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
-      owner = 'hbase',
-      template_tag = None,
-    )
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hbase_master_jaas.conf',
-      owner = 'hbase',
-      template_tag = None,
-    )
-    self.assertResourceCalled('Directory', '/var/run/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
-    self.assertResourceCalled('Directory', '/var/log/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
-    self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hbase',
-                              content='log4jproperties\nline2'
-    )
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6405.ambari.apache.org:8020/apps/hbase/data',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'],
-                              )
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = True,
-                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = '/usr/bin/kinit',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create'],
-                              )
-
-  @skip("enable when there's an upgrade target to test")
-  def test_start_default_22(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "start",
-                   config_file="hbase-2.2.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES)
-    
-    self.assertResourceCalled('Directory', '/etc/hbase',
-      mode = 0755)
-
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-      owner = 'hbase',
-      group = 'hadoop',
-      recursive = True)
-
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      mode = 0775,
-      recursive = True,
-      cd_access='a')
-
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True)
-
-    self.assertResourceCalled('Directory', '/hadoop/hbase/local/jars',
-      owner = 'hbase',
-      group = 'hadoop',
-      mode=0775,
-      recursive = True)
-
-    self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-site'])
-    self.assertResourceCalled('XmlConfig', 'core-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['core-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['core-site'])
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hdfs-site'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'])
-
-    self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
-                              owner = 'hdfs',
-                              group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
-                              configurations = self.getConfig()['configurations']['hdfs-site'],
-                              configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'])
-
-    self.assertResourceCalled('XmlConfig', 'hbase-policy.xml',
-      owner = 'hbase',
-      group = 'hadoop',
-      conf_dir = '/etc/hbase/conf',
-      configurations = self.getConfig()['configurations']['hbase-policy'],
-      configuration_attributes = self.getConfig()['configuration_attributes']['hbase-policy'])
-
-    self.assertResourceCalled('File', '/etc/hbase/conf/hbase-env.sh',
-      owner = 'hbase',
-      content = InlineTemplate(self.getConfig()['configurations']['hbase-env']['content']))
-
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/hadoop-metrics2-hbase.properties',
-      owner = 'hbase',
-      template_tag = 'GANGLIA-MASTER')
-
-    self.assertResourceCalled('TemplateConfig', '/etc/hbase/conf/regionservers',
-      owner = 'hbase',
-      template_tag = None)
-
-    self.assertResourceCalled('Directory', '/var/run/hbase',
-      owner = 'hbase',
-      recursive = True)
-
-    self.assertResourceCalled('Directory', '/var/log/hbase',
-      owner = 'hbase',
-      recursive = True)
-
-    self.assertResourceCalled('File',
-                              '/etc/hbase/conf/log4j.properties',
-                              mode=0644,
-                              group='hadoop',
-                              owner='hbase',
-                              content='log4jproperties\nline2')
-
-    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              mode = 0711,
-                              owner = 'hbase',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create_delayed'])
-
-    self.assertResourceCalled('HdfsDirectory', None,
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/etc/hadoop/conf',
-                              hdfs_user = 'hdfs',
-                              kinit_path_local = "/usr/bin/kinit",
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              action = ['create'])
-
-    self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
-      not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',
-      user = 'hbase')
-
-    self.assertNoMoreResources()
-
-  @patch("resource_management.libraries.functions.security_commons.build_expectations")
-  @patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
-  @patch("resource_management.libraries.functions.security_commons.validate_security_config_properties")
-  @patch("resource_management.libraries.functions.security_commons.cached_kinit_executor")
-  @patch("resource_management.libraries.script.Script.put_structured_out")
-  def test_security_status(self, put_structured_out_mock, cached_kinit_executor_mock, validate_security_config_mock, get_params_mock, build_exp_mock):
-    # Test that function works when is called with correct parameters
-
-    security_params = {
-      'hbase-site': {
-        'hbase.master.kerberos.principal': '/path/to/hbase_keytab',
-        'hbase.master.keytab.file': 'hbase_principal'
-      }
-    }
-
-    result_issues = []
-    props_value_check = {"hbase.security.authentication": "kerberos",
-                           "hbase.security.authorization": "true"}
-    props_empty_check = ["hbase.master.keytab.file",
-                           "hbase.master.kerberos.principal"]
-
-    props_read_check = ["hbase.master.keytab.file"]
-
-    get_params_mock.return_value = security_params
-    validate_security_config_mock.return_value = result_issues
-
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "security_status",
-                   config_file="hbase_secure.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-
-    build_exp_mock.assert_called_with('hbase-site', props_value_check, props_empty_check, props_read_check)
-    put_structured_out_mock.assert_called_with({"securityState": "SECURED_KERBEROS"})
-    cached_kinit_executor_mock.called_with('/usr/bin/kinit',
-                                           self.config_dict['configurations']['hbase-env']['hbase_user'],
-                                           security_params['hbase-site']['hbase.master.keytab.file'],
-                                           security_params['hbase-site']['hbase.master.kerberos.principal'],
-                                           self.config_dict['hostname'],
-                                           '/tmp')
-
-     # Testing that the exception throw by cached_executor is caught
-    cached_kinit_executor_mock.reset_mock()
-    cached_kinit_executor_mock.side_effect = Exception("Invalid command")
-
-    try:
-      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "security_status",
-                   config_file="hbase_secure.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-      )
-    except:
-      self.assertTrue(True)
-
-    # Testing with a security_params which doesn't contains hbase-site
-    empty_security_params = {}
-    cached_kinit_executor_mock.reset_mock()
-    get_params_mock.reset_mock()
-    put_structured_out_mock.reset_mock()
-    get_params_mock.return_value = empty_security_params
-
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "security_status",
-                   config_file="hbase_secure.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    put_structured_out_mock.assert_called_with({"securityIssuesFound": "Keytab file or principal are not set property."})
-
-    # Testing with not empty result_issues
-    result_issues_with_params = {}
-    result_issues_with_params['hbase-site']="Something bad happened"
-
-    validate_security_config_mock.reset_mock()
-    get_params_mock.reset_mock()
-    validate_security_config_mock.return_value = result_issues_with_params
-    get_params_mock.return_value = security_params
-
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "security_status",
-                   config_file="hbase_default.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
-
-    # Testing with security_enable = false
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
-                   classname = "HbaseMaster",
-                   command = "security_status",
-                   config_file="hbase_secure.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES
-    )
-    put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
-
-  @skip("there's no stacks/2.3/configs/hbase-preupgrade.json")
-  def test_upgrade_backup(self):
-    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_upgrade.py",
-                   classname = "HbaseMasterUpgrade",
-                   command = "snapshot",
-                   config_file="hbase-preupgrade.json",
-                   hdp_stack_version = self.STACK_VERSION,
-                   target = RMFTestCase.TARGET_COMMON_SERVICES)
-
-    self.assertResourceCalled('Execute', " echo 'snapshot_all' | /usr/hdp/current/hbase-client/bin/hbase shell",
-      user = 'hbase')
-  
-    self.assertNoMoreResources()