You are viewing a plain text version of this content; the canonical (HTML) version is available at the original mailing-list archive.
Posted to commits@ambari.apache.org by sm...@apache.org on 2017/05/10 18:22:13 UTC
ambari git commit: AMBARI-20982. StackAdvisor error - Slider
(Metastore Heap Size) has small range of value from 512 mb to 2048 mb
[upgrade from 2.1.x to 2.4.3] (Dmytro Grinenko via smohanty)
Repository: ambari
Updated Branches:
refs/heads/branch-2.4 05e5cc8c2 -> 36fbb6163
AMBARI-20982. StackAdvisor error - Slider (Metastore Heap Size) has small range of value from 512 mb to 2048 mb [upgrade from 2.1.x to 2.4.3] (Dmytro Grinenko via smohanty)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/36fbb616
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/36fbb616
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/36fbb616
Branch: refs/heads/branch-2.4
Commit: 36fbb6163e304a5532f300477fd9d60f2a1584c2
Parents: 05e5cc8
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Wed May 10 11:05:27 2017 -0700
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Wed May 10 11:05:27 2017 -0700
----------------------------------------------------------------------
.../stacks/HDP/2.0.6/services/stack_advisor.py | 11 ++++-
.../stacks/2.0.6/common/test_stack_advisor.py | 52 ++++++++++++++++++++
2 files changed, 62 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/36fbb616/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 928fa92..4ac17af 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -359,7 +359,16 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
self.updateMountProperties("hdfs-site", hdfs_mount_properties, configurations, services, hosts)
- dataDirs = hdfsSiteProperties['dfs.datanode.data.dir'].split(",")
+ dataDirs = ["/hadoop/hdfs/namenode"]
+
+ if configurations and "hdfs-site" in configurations and \
+ "dfs.datanode.data.dir" in configurations["hdfs-site"]["properties"] and \
+ configurations["hdfs-site"]["properties"]["dfs.datanode.data.dir"] is not None:
+ dataDirs = configurations["hdfs-site"]["properties"]["dfs.datanode.data.dir"].split(",")
+
+ elif hdfsSiteProperties and "dfs.datanode.data.dir" in hdfsSiteProperties and \
+ hdfsSiteProperties["dfs.datanode.data.dir"] is not None:
+ dataDirs = hdfsSiteProperties["dfs.datanode.data.dir"].split(",")
# dfs.datanode.du.reserved should be set to 10-15% of volume size
# For each host selects maximum size of the volume. Then gets minimum for all hosts.
http://git-wip-us.apache.org/repos/asf/ambari/blob/36fbb616/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 927f0de..e7a1737 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -1502,6 +1502,58 @@ class TestHDP206StackAdvisor(TestCase):
self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services3, hosts)
self.assertEquals(configurations, expected)
+ hosts = {
+ "items": [
+ {
+ "href": "/api/v1/hosts/host1",
+ "Hosts": {
+ "cpu_count": 1,
+ "host_name": "c6401.ambari.apache.org",
+ "os_arch": "x86_64",
+ "os_type": "centos6",
+ "ph_cpu_count": 1,
+ "public_host_name": "public.c6401.ambari.apache.org",
+ "rack_info": "/default-rack",
+ "total_mem": 2097152,
+ "disk_info": [{
+ "available" : "21039512",
+ "device" : "/dev/vda1",
+ "used" : "3316924",
+ "percent" : "14%",
+ "size" : "25666616",
+ "type" : "ext4",
+ "mountpoint" : "/"
+ },
+ {
+ "available" : "244732200",
+ "device" : "/dev/vdb",
+ "used" : "60508",
+ "percent" : "1%",
+ "size" : "257899908",
+ "type" : "ext4",
+ "mountpoint" : "/grid/0"
+ }]
+ }
+ }
+ ]}
+
+ expected["hdfs-site"] = {
+ 'properties': {
+ 'dfs.datanode.du.reserved': '33011188224',
+ 'dfs.internal.nameservices': 'mycluster',
+ 'dfs.ha.namenodes.mycluster': 'nn1,nn2',
+ 'dfs.datanode.data.dir': '/hadoop/hdfs/data,/grid/0/hadoop/hdfs/data',
+ 'dfs.namenode.name.dir': '/hadoop/hdfs/namenode,/grid/0/hadoop/hdfs/namenode',
+ 'dfs.namenode.checkpoint.dir': '/hadoop/hdfs/namesecondary',
+ },
+ 'property_attributes': {
+ 'dfs.namenode.rpc-address': {
+ 'delete': 'true'
+ }
+ }
+ }
+ self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services3, hosts)
+ self.assertEquals(configurations, expected)
def test_getHostNamesWithComponent(self):