Posted to commits@ambari.apache.org by ds...@apache.org on 2015/12/30 15:45:01 UTC

ambari git commit: AMBARI-14524 HDFS Recommendation: dfs.datanode.du.reserved should be set to 10%-15% of volume size (dsen)

Repository: ambari
Updated Branches:
  refs/heads/trunk df267643d -> ce4e4ea30


AMBARI-14524 HDFS Recommendation: dfs.datanode.du.reserved should be set to 10%-15% of volume size (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ce4e4ea3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ce4e4ea3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ce4e4ea3

Branch: refs/heads/trunk
Commit: ce4e4ea305f0ede6fa7129063751d37e19b72d03
Parents: df26764
Author: Dmytro Sen <ds...@apache.org>
Authored: Wed Dec 30 16:44:52 2015 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Wed Dec 30 16:44:52 2015 +0200

----------------------------------------------------------------------
 .../HDFS/2.1.0.2.0/configuration/hdfs-site.xml  |  6 +++
 .../stacks/HDP/2.0.6/services/stack_advisor.py  | 28 ++++++++++-
 .../stacks/HDP/2.2/services/stack_advisor.py    |  5 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |  6 ++-
 .../stacks/2.0.6/common/test_stack_advisor.py   | 42 ++++++++++++++--
 .../stacks/2.1/common/test_stack_advisor.py     | 15 +++++-
 .../stacks/2.2/common/test_stack_advisor.py     | 50 +++++++++++++++++---
 .../stacks/2.3/common/test_stack_advisor.py     | 44 ++++++++++-------
 8 files changed, 164 insertions(+), 32 deletions(-)
----------------------------------------------------------------------
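
The recommendation below reserves one eighth of the largest volume backing a
DataNode data directory. Ambari reports volume sizes in kilobytes via each
host's disk_info, so the advisor computes size_kB * 1024 / 8 bytes, i.e.
12.5% of the volume, which sits inside the 10%-15% window from the summary.
With the 8 kB volume used by the test fixtures this works out to
8 * 1024 / 8 = 1024 bytes, which is why the updated tests expect
dfs.datanode.du.reserved to be '1024'.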


http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4e4ea3/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
index 3282522..ea5514d 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
@@ -278,6 +278,12 @@
       <type>int</type>
       <unit>bytes</unit>
     </value-attributes>
+    <depends-on>
+      <property>
+        <type>hdfs-site</type>
+        <name>dfs.datanode.data.dir</name>
+      </property>
+    </depends-on>
   </property>
 
   <property>
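
In Ambari stack definitions, the <depends-on> element above declares that the
recommended value of dfs.datanode.du.reserved must be recomputed whenever
dfs.datanode.data.dir changes, so the reservation tracks whichever volumes
actually host the DataNode directories.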

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4e4ea3/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 7e2871b..a62e694 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -239,6 +239,27 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
       if len(namenodes.split(',')) > 1:
         putHDFSSitePropertyAttributes("dfs.namenode.rpc-address", "delete", "true")
 
+    #Initialize default 'dfs.datanode.data.dir' if needed
+    if (not hdfsSiteProperties) or ('dfs.datanode.data.dir' not in hdfsSiteProperties):
+      putHDFSSiteProperty('dfs.datanode.data.dir', '/hadoop/hdfs/data')
+    #dfs.datanode.du.reserved should be set to 10-15% of volume size
+    mountPoints = []
+    mountPointDiskAvailableSpace = [] #kBytes
+    for host in hosts["items"]:
+      for diskInfo in host["Hosts"]["disk_info"]:
+        mountPoints.append(diskInfo["mountpoint"])
+        mountPointDiskAvailableSpace.append(long(diskInfo["size"]))
+    maxFreeVolumeSize = 0L #kBytes
+    dataDirs = hdfsSiteProperties.get('dfs.datanode.data.dir', '/hadoop/hdfs/data').split(",") if hdfsSiteProperties else ['/hadoop/hdfs/data']
+    for dataDir in dataDirs:
+      mp = getMountPointForDir(dataDir, mountPoints)
+      for i in range(len(mountPoints)):
+        if mp == mountPoints[i]:
+          if mountPointDiskAvailableSpace[i] > maxFreeVolumeSize:
+            maxFreeVolumeSize = mountPointDiskAvailableSpace[i]
+
+    putHDFSSiteProperty('dfs.datanode.du.reserved', maxFreeVolumeSize * 1024 / 8) #Bytes
+
     # recommendations for "hadoop.proxyuser.*.hosts", "hadoop.proxyuser.*.groups" properties in core-site
     self.recommendHadoopProxyUsers(configurations, services, hosts)
 
@@ -787,7 +808,8 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
   def getServiceConfigurationValidators(self):
     return {
-      "HDFS": {"hadoop-env": self.validateHDFSConfigurationsEnv},
+      "HDFS": { "hdfs-site": self.validateHDFSConfigurations,
+                "hadoop-env": self.validateHDFSConfigurationsEnv},
       "MAPREDUCE2": {"mapred-site": self.validateMapReduce2Configurations},
       "YARN": {"yarn-site": self.validateYARNConfigurations},
       "HBASE": {"hbase-env": self.validateHbaseEnvConfigurations},
@@ -1291,6 +1313,10 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
                         {"config-name": "hbase_user", "item": self.validatorEqualsPropertyItem(properties, "hbase_user", hbase_site, "hbase.superuser")} ]
     return self.toConfigurationValidationProblems(validationItems, "hbase-env")
 
+  def validateHDFSConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
+    validationItems = [{"config-name": 'dfs.datanode.du.reserved', "item": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'dfs.datanode.du.reserved')}]
+    return self.toConfigurationValidationProblems(validationItems, "hdfs-site")
+
   def validateHDFSConfigurationsEnv(self, properties, recommendedDefaults, configurations, services, hosts):
     validationItems = [ {"config-name": 'namenode_heapsize', "item": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'namenode_heapsize')},
                         {"config-name": 'namenode_opt_newsize', "item": self.validatorLessThenDefaultValue(properties, recommendedDefaults, 'namenode_opt_newsize')},

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4e4ea3/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index 12f85b8..9e43865 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -1042,6 +1042,7 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
 
 
   def validateHDFSConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
+    parentValidationProblems = super(HDP22StackAdvisor, self).validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
    # We cannot access the property hadoop.security.authentication from the
    # other config (core-site), so we use another heuristic here
     hdfs_site = properties
@@ -1179,7 +1180,9 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
                                   "item": self.getWarnItem(
                                     "Invalid property value: {0}. Valid values are {1}.".format(
                                       data_transfer_protection_value, VALID_TRANSFER_PROTECTION_VALUES))})
-    return self.toConfigurationValidationProblems(validationItems, "hdfs-site")
+    validationProblems = self.toConfigurationValidationProblems(validationItems, "hdfs-site")
+    validationProblems.extend(parentValidationProblems)
+    return validationProblems
 
   def validateHiveServer2Configurations(self, properties, recommendedDefaults, configurations, services, hosts):
     hive_server2 = properties
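
The shape of the change above — run the parent's validator first, then extend
this level's problems with the inherited ones — is what keeps checks from
being silently dropped as stack versions layer on top of each other;
previously the overrides either skipped the parent's checks or ignored its
return value. A schematic sketch of the pattern (hypothetical minimal classes,
not the real StackAdvisor API):

    class BaseAdvisor(object):
        def validate_hdfs(self, properties):
            problems = []
            if int(properties.get('dfs.datanode.du.reserved', 0)) < 1024:
                problems.append('dfs.datanode.du.reserved below recommended value')
            return problems

    class ChildAdvisor(BaseAdvisor):
        def validate_hdfs(self, properties):
            # Run the inherited checks first, then append this level's items.
            problems = super(ChildAdvisor, self).validate_hdfs(properties)
            if properties.get('dfs.http.policy') == 'WRONG_VALUE':
                problems.append('Invalid dfs.http.policy value')
            return problems

    # Validating {'dfs.datanode.du.reserved': '512',
    #             'dfs.http.policy': 'WRONG_VALUE'}
    # with ChildAdvisor reports both problems.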

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4e4ea3/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index 5ab05fd..f57debb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -603,7 +603,7 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
     return parentValidators
 
   def validateHDFSConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
-    super(HDP23StackAdvisor, self).validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
+    parentValidationProblems = super(HDP23StackAdvisor, self).validateHDFSConfigurations(properties, recommendedDefaults, configurations, services, hosts)
 
    # We cannot access the property hadoop.security.authentication from the
    # other config (core-site), so we use another heuristic here
@@ -618,7 +618,9 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
         validationItems.append({"config-name": 'dfs.namenode.inode.attributes.provider.class',
                                     "item": self.getWarnItem(
                                       "dfs.namenode.inode.attributes.provider.class needs to be set to 'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer' if Ranger HDFS Plugin is enabled.")})
-    return self.toConfigurationValidationProblems(validationItems, "hdfs-site")
+    validationProblems = self.toConfigurationValidationProblems(validationItems, "hdfs-site")
+    validationProblems.extend(parentValidationProblems)
+    return validationProblems
 
 
   def validateHiveConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4e4ea3/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 6699e94..ec67a82 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -961,7 +961,11 @@ class TestHDP206StackAdvisor(TestCase):
             "ph_cpu_count": 1,
             "public_host_name": "c6401.ambari.apache.org",
             "rack_info": "/default-rack",
-            "total_mem": 2097152
+            "total_mem": 2097152,
+            "disk_info": [{
+              "size": '8',
+              "mountpoint": "/"
+            }]
           }
         }]}
 
@@ -1041,12 +1045,14 @@ class TestHDP206StackAdvisor(TestCase):
                 'falcon-env':
                   {'properties':
                      {'falcon_user': 'falcon'}},
+                'hdfs-site':
+                  {'properties': 
+                     {'dfs.datanode.data.dir': '/hadoop/hdfs/data',
+                      'dfs.datanode.du.reserved': '1024'}},
                 'hive-env':
                   {'properties':
                      {'hive_user': 'hive',
                       'webhcat_user': 'webhcat'}},
-                'hdfs-site':
-                  {'properties': {}},
                 'hadoop-env':
                   {'properties':
                      {'hdfs_user': 'hdfs',
@@ -1094,7 +1100,9 @@ class TestHDP206StackAdvisor(TestCase):
                      {'hive_user': 'hive',
                       'webhcat_user': 'webhcat'}},
                 'hdfs-site':
-                  {'properties': {}},
+                  {'properties': 
+                     {'dfs.datanode.data.dir': '/hadoop/hdfs/data',
+                      'dfs.datanode.du.reserved': '1024'}},
                 'hadoop-env':
                   {'properties':
                      {'hdfs_user': 'hdfs1',
@@ -1110,8 +1118,11 @@ class TestHDP206StackAdvisor(TestCase):
     configurations["hdfs-site"]["properties"]['dfs.nameservices'] = "mycluster"
     configurations["hdfs-site"]["properties"]['dfs.ha.namenodes.mycluster'] = "nn1,nn2"
     services['configurations'] = configurations
+
     expected["hdfs-site"] = {
       'properties': {
+        'dfs.datanode.data.dir': '/hadoop/hdfs/data',
+        'dfs.datanode.du.reserved': '1024',
         'dfs.nameservices': 'mycluster',
         'dfs.ha.namenodes.mycluster': 'nn1,nn2'
       },
@@ -1188,6 +1199,29 @@ class TestHDP206StackAdvisor(TestCase):
     expected = "zk.host1:2183,zk.host2:2183,zk.host3:2183"
     self.assertEquals(result, expected)
 
+  def test_validateHDFSConfigurations(self):
+    configurations = {}
+    services = ''
+    hosts = ''
+    #Default configuration
+    recommendedDefaults = {'dfs.datanode.du.reserved': '1024'}
+    properties = {'dfs.datanode.du.reserved': '1024'}
+    res = self.stackAdvisor.validateHDFSConfigurations(properties, 
+                    recommendedDefaults, configurations, services, hosts)
+    self.assertFalse(res)
+    #Value is less than expected
+    recommendedDefaults = {'dfs.datanode.du.reserved': '1024'}
+    properties = {'dfs.datanode.du.reserved': '512'}
+    res = self.stackAdvisor.validateHDFSConfigurations(properties, 
+                    recommendedDefaults, configurations, services, hosts)
+    self.assertTrue(res)
+    #Value is bigger than expected
+    recommendedDefaults = {'dfs.datanode.du.reserved': '1024'}
+    properties = {'dfs.datanode.du.reserved': '2048'}
+    res = self.stackAdvisor.validateHDFSConfigurations(properties, 
+                    recommendedDefaults, configurations, services, hosts)
+    self.assertFalse(res)
+
   def test_validateHDFSConfigurationsEnv(self):
     configurations = {}
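
A note on the new test above: validateHDFSConfigurations returns a list of
validation problems, so assertFalse(res) asserts the list is empty (the value
matches or exceeds the recommendation) and assertTrue(res) asserts that a
warning was produced for the undersized value.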
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4e4ea3/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
index 63d34af..0ca31a5 100644
--- a/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.1/common/test_stack_advisor.py
@@ -251,7 +251,16 @@ class TestHDP21StackAdvisor(TestCase):
         }
       }
     }
-
+    hosts = {
+      "items": [
+        {
+          "Hosts": {
+            "disk_info": [{
+              "size": '8',
+              "mountpoint": "/"
+            }]
+          }
+        }]}
     services = {
       "services": [
         {
@@ -282,11 +291,13 @@ class TestHDP21StackAdvisor(TestCase):
       },
       "hdfs-site": {
         "properties": {
+          'dfs.datanode.data.dir': '/hadoop/hdfs/data',
+          'dfs.datanode.du.reserved': '1024'
         }
       }
     }
 
-    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, '')
+    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations, expected)
 
   def test_validateHDFSConfigurationsEnv(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4e4ea3/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index 4e5458e..5f95b6d 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -206,7 +206,9 @@ class TestHDP22StackAdvisor(TestCase):
 
 
   def test_validateHDFSConfigurations(self):
-    recommendedDefaults = None
+    recommendedDefaults = {
+      'dfs.datanode.du.reserved': '1024'
+    }
 
     unsecure_cluster_core_site = {
       'hadoop.security.authentication': 'simple',
@@ -220,6 +222,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Unsecured cluster, secure ports
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.http.address': '0.0.0.0:1022',
     }
@@ -247,6 +250,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Unsecured cluster, unsecure ports
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.datanode.address': '0.0.0.0:55555',
                     'dfs.datanode.http.address': '0.0.0.0:55555',
                     }
@@ -276,6 +280,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, invalid dfs.http.policy value
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'WRONG_VALUE',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.http.address': '0.0.0.0:1022',
@@ -310,6 +315,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTPS_ONLY, https address not defined
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTPS_ONLY',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     }
@@ -339,6 +345,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTPS_ONLY, https address defined and secure
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTPS_ONLY',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.https.address': '0.0.0.0:1022',
@@ -369,6 +376,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTPS_ONLY, https address defined and non secure
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTPS_ONLY',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.https.address': '0.0.0.0:50475',
@@ -399,6 +407,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTPS_ONLY, non secure dfs port, https property not defined
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTPS_ONLY',
                     'dfs.datanode.address': '0.0.0.0:50010',
                  }
@@ -450,6 +459,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTPS_ONLY, non secure dfs port, https defined and secure
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTPS_ONLY',
                     'dfs.datanode.address': '0.0.0.0:50010',
                     'dfs.datanode.https.address': '0.0.0.0:1022',
@@ -499,6 +509,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTPS_ONLY, valid non-root configuration
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTPS_ONLY',
                     'dfs.datanode.address': '0.0.0.0:50010',
                     'dfs.datanode.https.address': '0.0.0.0:50475',
@@ -523,6 +534,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTP_ONLY, insecure port
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTP_ONLY',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.http.address': '0.0.0.0:50475',
@@ -564,6 +576,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTP_ONLY, valid configuration
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTP_ONLY',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.http.address': '0.0.0.0:1022',
@@ -587,6 +600,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, absent dfs.http.policy (typical situation)
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.http.address': '0.0.0.0:1022',
                     }
@@ -609,6 +623,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTP_ONLY, misusage of dfs.data.transfer.protection warning
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTP_ONLY',
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.http.address': '0.0.0.0:1022',
@@ -638,6 +653,7 @@ class TestHDP22StackAdvisor(TestCase):
 
     # TEST CASE: Secure cluster, dfs.http.policy=HTTPS_ONLY, wrong dfs.data.transfer.protection value
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.http.policy': 'HTTPS_ONLY',
                     'dfs.datanode.address': '0.0.0.0:50010',
                     'dfs.datanode.https.address': '0.0.0.0:50475',
@@ -667,6 +683,7 @@ class TestHDP22StackAdvisor(TestCase):
     # TEST CASE: Hadoop wire encryption enabled
 
     properties = {  # hdfs-site
+                    'dfs.datanode.du.reserved': '1024',
                     'dfs.encrypt.data.transfer': 'true',  # Wire encryption
                     'dfs.datanode.address': '0.0.0.0:1019',
                     'dfs.datanode.http.address': '0.0.0.0:1022',
@@ -2753,6 +2770,7 @@ class TestHDP22StackAdvisor(TestCase):
       },
       'hdfs-site': {
         'properties': {
+          'dfs.datanode.du.reserved': '1024',
           'dfs.datanode.max.transfer.threads': '16384',
           'dfs.namenode.safemode.threshold-pct': '1.000',
           'dfs.datanode.failed.volumes.tolerated': '1',
@@ -2879,7 +2897,11 @@ class TestHDP22StackAdvisor(TestCase):
             "ph_cpu_count" : 1,
             "public_host_name" : "host1",
             "rack_info" : "/default-rack",
-            "total_mem" : 2097152
+            "total_mem" : 2097152,
+            "disk_info": [{
+              "size": '8',
+              "mountpoint": "/"
+            }]
           }
         },
         {
@@ -2892,7 +2914,11 @@ class TestHDP22StackAdvisor(TestCase):
             "ph_cpu_count" : 1,
             "public_host_name" : "host2",
             "rack_info" : "/default-rack",
-            "total_mem" : 10485760
+            "total_mem" : 10485760,
+            "disk_info": [{
+              "size": '8',
+              "mountpoint": "/"
+            }]
           }
         },
       ]
@@ -2916,7 +2942,11 @@ class TestHDP22StackAdvisor(TestCase):
             "ph_cpu_count" : 1,
             "public_host_name" : hostname,
             "rack_info" : "/default-rack",
-            "total_mem" : 2097152
+            "total_mem" : 2097152,
+            "disk_info": [{
+              "size": '8',
+              "mountpoint": "/"
+            }]
           }
         }
       )
@@ -2939,7 +2969,11 @@ class TestHDP22StackAdvisor(TestCase):
             "ph_cpu_count" : 1,
             "public_host_name" : hostname,
             "rack_info" : "/default-rack",
-            "total_mem" : 2097152
+            "total_mem" : 2097152,
+            "disk_info": [{
+              "size": '8',
+              "mountpoint": "/"
+            }]
           }
         }
       )
@@ -2964,7 +2998,11 @@ class TestHDP22StackAdvisor(TestCase):
             "ph_cpu_count" : 1,
             "public_host_name" : hostname,
             "rack_info" : "/default-rack",
-            "total_mem" : 2097152
+            "total_mem" : 2097152,
+            "disk_info": [{
+              "size": '8',
+              "mountpoint": "/"
+            }]
           }
         }
       )

http://git-wip-us.apache.org/repos/asf/ambari/blob/ce4e4ea3/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
index db61f5f..67ac25f 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.3/common/test_stack_advisor.py
@@ -284,13 +284,34 @@ class TestHDP23StackAdvisor(TestCase):
 
 
   def test_recommendHDFSConfigurations(self):
-    configurations = {}
+    configurations = {
+      "hdfs-site": {
+        "properties": {
+          "dfs.namenode.inode.attributes.provider.class": "org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer",
+        }
+      },
+      "ranger-hdfs-plugin-properties": {
+        "properties": {
+          "ranger-hdfs-plugin-enabled": "No"
+        }
+      }
+    }
     clusterData = {
       "totalAvailableRam": 2048,
       "hBaseInstalled": True,
       "hbaseRam": 112,
       "reservedRam": 128
     }
+    hosts = {
+      "items": [
+        {
+          "Hosts": {
+            "disk_info": [{
+              "size": '8',
+              "mountpoint": "/"
+            }]
+          }
+        }]}
     services = {
       "services":
         [
@@ -298,35 +319,26 @@ class TestHDP23StackAdvisor(TestCase):
             "StackServices": {
               "service_name" : "HDFS",
               "service_version" : "2.6.0.2.2"
-            }
+            },
+            "components": [
+            ]
           }
         ],
       "Versions": {
         "stack_version": "2.3"
       },
-      "configurations": {
-        "hdfs-site": {
-          "properties": {
-            "dfs.namenode.inode.attributes.provider.class": "org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer"
-          }
-        },
-        "ranger-hdfs-plugin-properties": {
-          "properties": {
-            "ranger-hdfs-plugin-enabled": "No"
-          }
-        }
-      }
+      "configurations": configurations
     }
 
     # Test with Ranger HDFS plugin disabled
-    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, None)
+    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations['hdfs-site']['property_attributes']['dfs.namenode.inode.attributes.provider.class'], {'delete': 'true'}, "Test with Ranger HDFS plugin is disabled")
 
     # Test with Ranger HDFS plugin is enabled
     configurations['hdfs-site']['properties'] = {}
     configurations['hdfs-site']['property_attributes'] = {}
     services['configurations']['ranger-hdfs-plugin-properties']['properties']['ranger-hdfs-plugin-enabled'] = 'Yes'
-    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, None)
+    self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations['hdfs-site']['properties']['dfs.namenode.inode.attributes.provider.class'], 'org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer', "Test with Ranger HDFS plugin is enabled")
 
   def test_recommendYARNConfigurations(self):