Posted to commits@ambari.apache.org by sr...@apache.org on 2015/06/04 02:24:45 UTC

ambari git commit: AMBARI-11552. 2.3 stack advisor doesn't take into account HBASE-11520 (Nick Dimiduk via srimanth)

Repository: ambari
Updated Branches:
  refs/heads/trunk a86701cc4 -> aeccbc7fe


AMBARI-11552. 2.3 stack advisor doesn't take into account HBASE-11520 (Nick Dimiduk via srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aeccbc7f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aeccbc7f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aeccbc7f

Branch: refs/heads/trunk
Commit: aeccbc7fe458509241e16c47f653f65a6ed8c2e4
Parents: a86701c
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Wed Jun 3 17:00:02 2015 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Wed Jun 3 17:09:16 2015 -0700

----------------------------------------------------------------------
 .../stacks/HDP/2.2/services/stack_advisor.py    |  30 ++-
 .../stacks/HDP/2.3/services/stack_advisor.py    |  35 +++
 .../stacks/2.2/common/test_stack_advisor.py     |  33 ++-
 .../stacks/2.3/common/test_stack_advisor.py     | 227 +++++++++++++++++++
 4 files changed, 301 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aeccbc7f/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index df667bc..e466a91 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -486,12 +486,11 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       hbase_bucketcache_percentage_in_combinedcache_str = "{0:.4f}".format(math.ceil(hbase_bucketcache_percentage_in_combinedcache * 10000) / 10000.0)
 
       # Set values in hbase-site
-      putHbaseProperty = self.putProperty(configurations, "hbase-site", services)
-      putHbaseProperty('hfile.block.cache.size', hfile_block_cache_size)
-      putHbaseProperty('hbase.regionserver.global.memstore.size', hbase_regionserver_global_memstore_size)
-      putHbaseProperty('hbase.bucketcache.ioengine', 'offheap')
-      putHbaseProperty('hbase.bucketcache.size', hbase_bucketcache_size)
-      putHbaseProperty('hbase.bucketcache.percentage.in.combinedcache', hbase_bucketcache_percentage_in_combinedcache_str)
+      putHbaseSiteProperty('hfile.block.cache.size', hfile_block_cache_size)
+      putHbaseSiteProperty('hbase.regionserver.global.memstore.size', hbase_regionserver_global_memstore_size)
+      putHbaseSiteProperty('hbase.bucketcache.ioengine', 'offheap')
+      putHbaseSiteProperty('hbase.bucketcache.size', hbase_bucketcache_size)
+      putHbaseSiteProperty('hbase.bucketcache.percentage.in.combinedcache', hbase_bucketcache_percentage_in_combinedcache_str)
 
       # Enable in hbase-env
       putHbaseEnvProperty = self.putProperty(configurations, "hbase-env", services)
@@ -499,13 +498,10 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       putHbaseEnvProperty('hbase_regionserver_heapsize', regionserver_heap_size)
     else:
       # Disable
-      putHbaseProperty = self.putProperty(configurations, "hbase-site", services)
-      putHbaseProperty('hbase.bucketcache.ioengine', '')
-      putHbaseProperty('hbase.bucketcache.size', '')
-      putHbaseProperty('hbase.bucketcache.percentage.in.combinedcache', '')
-
-      putHbaseEnvProperty = self.putProperty(configurations, "hbase-env", services)
-      putHbaseEnvProperty('hbase_max_direct_memory_size', '')
+      putHbaseSitePropertyAttributes('hbase.bucketcache.ioengine', 'delete', 'true')
+      putHbaseSitePropertyAttributes('hbase.bucketcache.size', 'delete', 'true')
+      putHbaseSitePropertyAttributes('hbase.bucketcache.percentage.in.combinedcache', 'delete', 'true')
+      putHbaseEnvPropertyAttributes('hbase_max_direct_memory_size', 'delete', 'true')
 
     # Authorization
     hbase_coprocessor_region_classes = None
@@ -525,12 +521,12 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       hbase_security_authorization = services['configurations']['hbase-site']['properties']['hbase.security.authorization']
     if hbase_security_authorization:
       if 'true' == hbase_security_authorization.lower():
-        putHbaseProperty('hbase.coprocessor.master.classes', "org.apache.hadoop.hbase.security.access.AccessController")
+        putHbaseSiteProperty('hbase.coprocessor.master.classes', "org.apache.hadoop.hbase.security.access.AccessController")
         coprocessorRegionClassList.append("org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint")
         coprocessorRegionClassList.append("org.apache.hadoop.hbase.security.access.AccessController")
-        putHbaseProperty('hbase.coprocessor.regionserver.classes', "org.apache.hadoop.hbase.security.access.AccessController")
+        putHbaseSiteProperty('hbase.coprocessor.regionserver.classes', "org.apache.hadoop.hbase.security.access.AccessController")
       else:
-        putHbaseProperty('hbase.coprocessor.master.classes', "")
+        putHbaseSiteProperty('hbase.coprocessor.master.classes', "")
         coprocessorRegionClassList.append("org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint")
         putHbaseSitePropertyAttributes('hbase.coprocessor.regionserver.classes', 'delete', 'true')
     else:
@@ -551,7 +547,7 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
     #Remove duplicates
     uniqueCoprocessorRegionClassList = []
     [uniqueCoprocessorRegionClassList.append(i) for i in coprocessorRegionClassList if not uniqueCoprocessorRegionClassList.count(i)]
-    putHbaseProperty('hbase.coprocessor.region.classes', ','.join(set(uniqueCoprocessorRegionClassList)))
+    putHbaseSiteProperty('hbase.coprocessor.region.classes', ','.join(set(uniqueCoprocessorRegionClassList)))
 
 
   def recommendTezConfigurations(self, configurations, clusterData, services, hosts):
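
The hunk above swaps the locally built putHbaseProperty closure for the shared putHbaseSiteProperty / putHbaseSitePropertyAttributes helpers and, in the disable branch, marks the bucket cache keys for deletion instead of writing empty-string values. For readers unfamiliar with the advisor API, below is a minimal, hypothetical sketch of how such curried setters behave; the real helpers are defined in Ambari's base stack_advisor.py and their bodies may differ.

# Hypothetical sketch only: the function names mirror the diff, the bodies are an
# assumption about the shape of Ambari's putProperty/putPropertyAttribute helpers.
def putProperty(configurations, config_type, services=None):
  # 'services' is accepted to match the call sites above but ignored in this sketch.
  def setter(key, value):
    section = configurations.setdefault(config_type, {}).setdefault("properties", {})
    section[key] = str(value)
  return setter

def putPropertyAttribute(configurations, config_type):
  def setter(key, attribute, value):
    attrs = configurations.setdefault(config_type, {}).setdefault("property_attributes", {})
    attrs.setdefault(key, {})[attribute] = value
  return setter

configurations = {}
putHbaseSiteProperty = putProperty(configurations, "hbase-site")
putHbaseSitePropertyAttributes = putPropertyAttribute(configurations, "hbase-site")
putHbaseSiteProperty('hbase.bucketcache.ioengine', 'offheap')
putHbaseSitePropertyAttributes('hbase.bucketcache.percentage.in.combinedcache', 'delete', 'true')
# configurations now holds both "properties" and "property_attributes" entries for
# hbase-site, the same structure the expected dicts in the tests below assert against.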

http://git-wip-us.apache.org/repos/asf/ambari/blob/aeccbc7f/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 86a150d..0f85114 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -50,6 +50,41 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     super(HDP23StackAdvisor, self).recommendHBASEConfigurations(configurations, clusterData, services, hosts)
     putHbaseSiteProperty = self.putProperty(configurations, "hbase-site", services)
     putHbaseSitePropertyAttributes = self.putPropertyAttribute(configurations, "hbase-site")
+    putHbaseEnvProperty = self.putProperty(configurations, "hbase-env", services)
+    putHbaseEnvPropertyAttributes = self.putPropertyAttribute(configurations, "hbase-env")
+
+    # bucket cache for 1.x is configured slightly differently, HBASE-11520
+    threshold = 23 # 2 Gb is reserved for other offheap memory
+    if (int(clusterData["hbaseRam"]) > threshold):
+      # To enable cache - calculate values
+      regionserver_total_ram = int(clusterData["hbaseRam"]) * 1024
+      regionserver_heap_size = 20480
+      regionserver_max_direct_memory_size = regionserver_total_ram - regionserver_heap_size
+      hfile_block_cache_size = '0.4'
+      block_cache_heap = 8192 # int(regionserver_heap_size * hfile_block_cache_size)
+      hbase_regionserver_global_memstore_size = '0.4'
+      reserved_offheap_memory = 2048
+      bucketcache_offheap_memory = regionserver_max_direct_memory_size - reserved_offheap_memory
+      hbase_bucketcache_size = bucketcache_offheap_memory
+
+      # Set values in hbase-site
+      putHbaseSiteProperty('hfile.block.cache.size', hfile_block_cache_size)
+      putHbaseSiteProperty('hbase.regionserver.global.memstore.size', hbase_regionserver_global_memstore_size)
+      putHbaseSiteProperty('hbase.bucketcache.ioengine', 'offheap')
+      putHbaseSiteProperty('hbase.bucketcache.size', hbase_bucketcache_size)
+      # 2.2 stack method was called earlier, unset
+      putHbaseSitePropertyAttributes('hbase.bucketcache.percentage.in.combinedcache', 'delete', 'true')
+
+      # Enable in hbase-env
+      putHbaseEnvProperty('hbase_max_direct_memory_size', regionserver_max_direct_memory_size)
+      putHbaseEnvProperty('hbase_regionserver_heapsize', regionserver_heap_size)
+    else:
+      # Disable
+      putHbaseSitePropertyAttributes('hbase.bucketcache.ioengine', 'delete', 'true')
+      putHbaseSitePropertyAttributes('hbase.bucketcache.size', 'delete', 'true')
+      putHbaseSitePropertyAttributes('hbase.bucketcache.percentage.in.combinedcache', 'delete', 'true')
+
+      putHbaseEnvPropertyAttributes('hbase_max_direct_memory_size', 'delete', 'true')
 
     servicesList = [service["StackServices"]["service_name"] for service in services["services"]]
     if 'ranger-hbase-plugin-properties' in services['configurations'] and ('ranger-hbase-plugin-enabled' in services['configurations']['ranger-hbase-plugin-properties']['properties']):
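
The new HDP 2.3 branch follows HBASE-11520: hbase.bucketcache.size is now an absolute size in megabytes rather than a fraction of the combined cache, so hbase.bucketcache.percentage.in.combinedcache (set by the 2.2 advisor invoked via super) is explicitly deleted. A worked example of the sizing arithmetic in the hunk above, for the 112 GB region server used in the new 2.3 test, as a small stand-alone Python sketch:

# Worked example of the math above for clusterData["hbaseRam"] = 112 (GB).
# The resulting numbers match the values asserted in the new 2.3 test_stack_advisor.py.
hbase_ram_gb = 112
regionserver_total_ram = hbase_ram_gb * 1024                                            # 114688 MB
regionserver_heap_size = 20480                                                          # fixed 20 GB on-heap
regionserver_max_direct_memory_size = regionserver_total_ram - regionserver_heap_size   # 94208 MB
reserved_offheap_memory = 2048                                                          # 2 GB for other off-heap use
hbase_bucketcache_size = regionserver_max_direct_memory_size - reserved_offheap_memory  # 92160 MB

print(regionserver_max_direct_memory_size)   # 94208 -> hbase_max_direct_memory_size (hbase-env)
print(hbase_bucketcache_size)                # 92160 -> hbase.bucketcache.size (hbase-site)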

http://git-wip-us.apache.org/repos/asf/ambari/blob/aeccbc7f/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index 4ad289d..64166fc 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -1792,15 +1792,24 @@ class TestHDP22StackAdvisor(TestCase):
           "hbase.region.server.rpc.scheduler.factory.class": "org.apache.hadoop.hbase.ipc.PhoenixRpcSchedulerFactory",
           "hbase.rpc.controllerfactory.class": "org.apache.hadoop.hbase.ipc.controller.ServerRpcControllerFactory",
           "phoenix.functions.allowUserDefinedFunctions": "true",
-          "hbase.bucketcache.size": "",
-          "hbase.bucketcache.percentage.in.combinedcache": "",
           "hbase.regionserver.global.memstore.size": "0.4",
-          "hbase.bucketcache.ioengine": "",
           "hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint"
         },
         'property_attributes': {
           'hbase.coprocessor.regionserver.classes': {
             'delete': 'true'
+          },
+          "hbase.bucketcache.size": {
+            "delete": "true"
+          },
+          "hbase.bucketcache.percentage.in.combinedcache": {
+            "delete": "true"
+          },
+          "hbase.coprocessor.regionserver.classes": {
+            "delete": "true"
+          },
+          "hbase.bucketcache.ioengine": {
+            "delete": "true"
           }
         }
       },
@@ -1808,7 +1817,11 @@ class TestHDP22StackAdvisor(TestCase):
         "properties": {
           "hbase_master_heapsize": "8192",
           "hbase_regionserver_heapsize": "8192",
-          "hbase_max_direct_memory_size": ""
+        },
+        "property_attributes": {
+          "hbase_max_direct_memory_size": {
+            "delete": "true"
+          }
         }
       }
     }
@@ -1823,7 +1836,10 @@ class TestHDP22StackAdvisor(TestCase):
     # Test when phoenix_sql_enabled = false
     services['configurations']['hbase-env']['properties']['phoenix_sql_enabled'] = 'false'
     expected['hbase-site']['properties']['hbase.regionserver.wal.codec'] = 'org.apache.hadoop.hbase.regionserver.wal.WALCellCodec'
-    expected['hbase-site']['property_attributes'] = {'hbase.region.server.rpc.scheduler.factory.class': {'delete': 'true'}, 'hbase.rpc.controllerfactory.class': {'delete': 'true'}, 'hbase.coprocessor.regionserver.classes': {'delete': 'true'}, 'phoenix.functions.allowUserDefinedFunctions': {'delete': 'true'}}
+    expected['hbase-site']['property_attributes']['hbase.region.server.rpc.scheduler.factory.class'] = {'delete': 'true'}
+    expected['hbase-site']['property_attributes']['hbase.rpc.controllerfactory.class'] = {'delete': 'true'}
+    expected['hbase-site']['property_attributes']['hbase.coprocessor.regionserver.classes'] = {'delete': 'true'}
+    expected['hbase-site']['property_attributes']['phoenix.functions.allowUserDefinedFunctions'] = {'delete': 'true'}
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
     self.assertEquals(configurations, expected)
 
@@ -1855,8 +1871,11 @@ class TestHDP22StackAdvisor(TestCase):
                         }]})
     services['configurations']['hbase-env']['properties']['phoenix_sql_enabled'] = 'false'
     expected['hbase-site']['properties']['hbase.regionserver.wal.codec'] = 'org.apache.hadoop.hbase.regionserver.wal.WALCellCodec'
-    expected['hbase-site']['property_attributes'] = {'hbase.region.server.rpc.scheduler.factory.class': {'delete': 'true'}, 'hbase.rpc.controllerfactory.class': {'delete': 'true'}, 'hbase.coprocessor.regionserver.classes': {'delete': 'true'}, 'phoenix.functions.allowUserDefinedFunctions': {'delete': 'true'}}
-    expected['hbase-env']['property_attributes'] = {'hbase_master_heapsize': {'maximum': '49152'}}
+    expected['hbase-site']['property_attributes']['hbase.region.server.rpc.scheduler.factory.class'] = {'delete': 'true'}
+    expected['hbase-site']['property_attributes']['hbase.rpc.controllerfactory.class'] = {'delete': 'true'}
+    expected['hbase-site']['property_attributes']['hbase.coprocessor.regionserver.classes'] = {'delete': 'true'}
+    expected['hbase-site']['property_attributes']['phoenix.functions.allowUserDefinedFunctions'] = {'delete': 'true'}
+    expected['hbase-env']['property_attributes']['hbase_master_heapsize'] = {'maximum': '49152'}
     self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations, expected)
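
Note that the updated assertions above set property_attributes keys one at a time instead of reassigning the whole dict; a wholesale reassignment would silently drop the bucket cache delete-attributes added to expected earlier in the test. A minimal illustration with a hypothetical dict (not the test's own objects):

# Reassigning replaces everything set so far:
attrs = {'hbase.bucketcache.size': {'delete': 'true'}}
attrs = {'phoenix.functions.allowUserDefinedFunctions': {'delete': 'true'}}  # bucketcache entry lost

# Updating per key, as the test now does, keeps the earlier entries:
attrs = {'hbase.bucketcache.size': {'delete': 'true'}}
attrs['phoenix.functions.allowUserDefinedFunctions'] = {'delete': 'true'}    # both entries kept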
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/aeccbc7f/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
new file mode 100644
index 0000000..b72f0e2
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -0,0 +1,227 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import os
+from unittest import TestCase
+from mock.mock import patch, MagicMock
+
+
+class TestHDP23StackAdvisor(TestCase):
+
+  def setUp(self):
+    import imp
+    self.maxDiff = None
+    self.testDirectory = os.path.dirname(os.path.abspath(__file__))
+    stackAdvisorPath = os.path.join(self.testDirectory, '../../../../../main/resources/stacks/stack_advisor.py')
+    hdp206StackAdvisorPath = os.path.join(self.testDirectory, '../../../../../main/resources/stacks/HDP/2.0.6/services/stack_advisor.py')
+    hdp21StackAdvisorPath = os.path.join(self.testDirectory, '../../../../../main/resources/stacks/HDP/2.1/services/stack_advisor.py')
+    hdp22StackAdvisorPath = os.path.join(self.testDirectory, '../../../../../main/resources/stacks/HDP/2.2/services/stack_advisor.py')
+    hdp23StackAdvisorPath = os.path.join(self.testDirectory, '../../../../../main/resources/stacks/HDP/2.3/services/stack_advisor.py')
+    hdp23StackAdvisorClassName = 'HDP23StackAdvisor'
+    with open(stackAdvisorPath, 'rb') as fp:
+      imp.load_module('stack_advisor', fp, stackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+    with open(hdp206StackAdvisorPath, 'rb') as fp:
+      imp.load_module('stack_advisor_impl', fp, hdp206StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+    with open(hdp21StackAdvisorPath, 'rb') as fp:
+      imp.load_module('stack_advisor_impl', fp, hdp21StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+    with open(hdp22StackAdvisorPath, 'rb') as fp:
+      imp.load_module('stack_advisor_impl', fp, hdp22StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+    with open(hdp23StackAdvisorPath, 'rb') as fp:
+      stack_advisor_impl = imp.load_module('stack_advisor_impl', fp, hdp23StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
+    clazz = getattr(stack_advisor_impl, hdp23StackAdvisorClassName)
+    self.stackAdvisor = clazz()
+
+    # substitute method in the instance
+    self.get_system_min_uid_real = self.stackAdvisor.get_system_min_uid
+    self.stackAdvisor.get_system_min_uid = self.get_system_min_uid_magic
+
+  @patch('__builtin__.open')
+  @patch('os.path.exists')
+  def get_system_min_uid_magic(self, exists_mock, open_mock):
+    class MagicFile(object):
+      def read(self):
+        return """
+        #test line UID_MIN 200
+        UID_MIN 500
+        """
+
+      def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+      def __enter__(self):
+        return self
+
+    exists_mock.return_value = True
+    open_mock.return_value = MagicFile()
+    return self.get_system_min_uid_real()
+
+  def test_recommendHBASEConfigurations(self):
+    configurations = {
+        "yarn-site": {
+            "properties": {
+                "yarn.scheduler.minimum-allocation-mb": "256",
+                "yarn.scheduler.maximum-allocation-mb": "2048",
+                },
+            }
+    }
+    clusterData = {
+      "totalAvailableRam": 2048,
+      "hBaseInstalled": True,
+      "hbaseRam": 112,
+      "reservedRam": 128
+    }
+    expected = {
+      "hbase-site": {
+        "properties": {
+          "hbase.bucketcache.size": "92160",
+          "hbase.bucketcache.percentage.in.combinedcache": "0.9184",
+          "hbase.regionserver.global.memstore.size": "0.4",
+          "hfile.block.cache.size": "0.4",
+          "hbase.coprocessor.region.classes": "org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint",
+          "hbase.bucketcache.ioengine": "offheap"
+        },
+        "property_attributes": {
+          "hbase.coprocessor.regionserver.classes": {
+            "delete": "true"
+          },
+          "hbase.bucketcache.percentage.in.combinedcache": {
+            "delete": "true"
+          }
+        }
+      },
+      "hbase-env": {
+        "properties": {
+          "hbase_master_heapsize": "114688",
+          "hbase_regionserver_heapsize": "20480",
+          "hbase_max_direct_memory_size": "94208"
+        }
+      },
+      "yarn-site": {
+        "properties": {
+          "yarn.scheduler.minimum-allocation-mb": "256",
+          "yarn.scheduler.maximum-allocation-mb": "2048"
+        }
+      }
+    }
+    services = {"services":
+        [{"StackServices":
+              {"service_name" : "HDFS",
+               "service_version" : "2.6.0.2.2"
+               },
+          "components":[
+            {
+              "href":"/api/v1/stacks/HDP/versions/2.2/services/HDFS/components/DATANODE",
+              "StackServiceComponents":{
+                "advertise_version":"true",
+                "cardinality":"1+",
+                "component_category":"SLAVE",
+                "component_name":"DATANODE",
+                "custom_commands":[
+
+                ],
+                "display_name":"DataNode",
+                "is_client":"false",
+                "is_master":"false",
+                "service_name":"HDFS",
+                "stack_name":"HDP",
+                "stack_version":"2.2",
+                "hostnames":[
+                  "host1"
+                ]
+              },
+              "dependencies":[
+
+              ]
+            },
+            {
+              "href":"/api/v1/stacks/HDP/versions/2.2/services/HDFS/components/JOURNALNODE",
+              "StackServiceComponents":{
+                "advertise_version":"true",
+                "cardinality":"0+",
+                "component_category":"SLAVE",
+                "component_name":"JOURNALNODE",
+                "custom_commands":[
+
+                ],
+                "display_name":"JournalNode",
+                "is_client":"false",
+                "is_master":"false",
+                "service_name":"HDFS",
+                "stack_name":"HDP",
+                "stack_version":"2.2",
+                "hostnames":[
+                  "host1"
+                ]
+              },
+              "dependencies":[
+                {
+                  "href":"/api/v1/stacks/HDP/versions/2.2/services/HDFS/components/JOURNALNODE/dependencies/HDFS_CLIENT",
+                  "Dependencies":{
+                    "component_name":"HDFS_CLIENT",
+                    "dependent_component_name":"JOURNALNODE",
+                    "dependent_service_name":"HDFS",
+                    "stack_name":"HDP",
+                    "stack_version":"2.2"
+                  }
+                }
+              ]
+            },
+            {
+              "href":"/api/v1/stacks/HDP/versions/2.2/services/HDFS/components/NAMENODE",
+              "StackServiceComponents":{
+                "advertise_version":"true",
+                "cardinality":"1-2",
+                "component_category":"MASTER",
+                "component_name":"NAMENODE",
+                "custom_commands":[
+                  "DECOMMISSION",
+                  "REBALANCEHDFS"
+                ],
+                "display_name":"NameNode",
+                "is_client":"false",
+                "is_master":"true",
+                "service_name":"HDFS",
+                "stack_name":"HDP",
+                "stack_version":"2.2",
+                "hostnames":[
+                  "host2"
+                ]
+              },
+              "dependencies":[
+
+              ]
+            },
+          ],
+        }],
+    "configurations": configurations
+    }
+
+    # Test
+    self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)
+
+    # Test
+    clusterData['hbaseRam'] = '4'
+    expected["hbase-site"]["property_attributes"]["hbase.bucketcache.size"] = {"delete": "true"}
+    expected["hbase-site"]["property_attributes"]["hbase.bucketcache.ioengine"] = {"delete": "true"}
+    expected["hbase-site"]["property_attributes"]["hbase.bucketcache.percentage.in.combinedcache"] = {"delete": "true"}
+    expected["hbase-env"]["property_attributes"] = {"hbase_max_direct_memory_size" : {"delete": "true"}}
+    expected["hbase-env"]["properties"]["hbase_master_heapsize"] = "4096"
+    expected["hbase-env"]["properties"]["hbase_regionserver_heapsize"] = "4096"
+    self.stackAdvisor.recommendHBASEConfigurations(configurations, clusterData, services, None)
+    self.assertEquals(configurations, expected)