Posted to commits@ambari.apache.org by ds...@apache.org on 2016/04/14 15:25:07 UTC

ambari git commit: AMBARI-15506 Support for multiple Nameservices in namenode_ha_utils.py (hkropp via dsen)

Repository: ambari
Updated Branches:
  refs/heads/trunk 7bd0ad733 -> a8f1279ba


AMBARI-15506 Support for multiple Nameservices in namenode_ha_utils.py (hkropp via dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8f1279b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8f1279b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8f1279b

Branch: refs/heads/trunk
Commit: a8f1279baeb5fb787a66dfda3f491b5bb88e315f
Parents: 7bd0ad7
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Apr 14 16:22:25 2016 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Apr 14 16:22:25 2016 +0300

----------------------------------------------------------------------
 .../resource_management/TestNamenodeHaUtils.py  | 60 ++++++++++++++++++++
 .../libraries/functions/namenode_ha_utils.py    | 37 ++++++++++--
 2 files changed, 93 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a8f1279b/ambari-agent/src/test/python/resource_management/TestNamenodeHaUtils.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestNamenodeHaUtils.py b/ambari-agent/src/test/python/resource_management/TestNamenodeHaUtils.py
new file mode 100644
index 0000000..2fc4904
--- /dev/null
+++ b/ambari-agent/src/test/python/resource_management/TestNamenodeHaUtils.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+from unittest import TestCase
+from resource_management.libraries.functions.namenode_ha_utils import \
+  get_nameservice
+
+
+class TestNamenodeHaUtils(TestCase):
+
+  def test_get_nameservice(self):
+    # our cluster is HAA
+
+    # dfs.internal.nameservices in hdfs-site
+    hdfs_site = {
+      "dfs.internal.nameservices": "HAA",
+      "dfs.nameservices": "HAA,HAB",
+      "dfs.ha.namenodes.HAA": "nn1,nn2",
+      "dfs.ha.namenodes.HAB": "nn1,nn2",
+      "dfs.namenode.rpc-address.HAA.nn1": "hosta1:8020",
+      "dfs.namenode.rpc-address.HAA.nn2": "hosta2:8020",
+      "dfs.namenode.rpc-address.HAB.nn1": "hostb1:8020",
+      "dfs.namenode.rpc-address.HAB.nn2": "hostb2:8020",
+    }
+
+    self.assertEqual("HAA", get_nameservice(hdfs_site))
+
+    # dfs.internal.nameservices not in hdfs-site
+    hdfs_site = {
+      "dfs.nameservices": "HAA,HAB",
+      "dfs.ha.namenodes.HAA": "nn1,nn2",
+      "dfs.ha.namenodes.HAB": "nn1,nn2",
+      "dfs.namenode.rpc-address.HAA.nn1": "hosta1:8020",
+      "dfs.namenode.rpc-address.HAA.nn2": "hosta2:8020",
+      "dfs.namenode.rpc-address.HAB.nn1": "hostb1:8020",
+      "dfs.namenode.rpc-address.HAB.nn2": "hostb2:8020",
+    }
+
+    self.assertEqual("HAA", get_nameservice(hdfs_site))
+
+    # Non HA
+    hdfs_site = {}
+
+    self.assertEqual(None, get_nameservice(hdfs_site))

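The new tests cover the dfs.internal.nameservices case and the plain dfs.nameservices case. A minimal sketch of the remaining branch, where get_nameservice falls back to matching dfs.namenode.shared.edits.dir, could look like the following; the qjournal hosts are illustrative assumptions, not values from this commit:

  from resource_management.libraries.functions.namenode_ha_utils import \
    get_nameservice

  # dfs.internal.nameservices is absent here; the nameservice whose name ends
  # dfs.namenode.shared.edits.dir is preferred over the first entry in the list
  hdfs_site = {
    "dfs.nameservices": "HAA,HAB",
    "dfs.ha.namenodes.HAA": "nn1,nn2",
    "dfs.ha.namenodes.HAB": "nn1,nn2",
    "dfs.namenode.shared.edits.dir": "qjournal://jn1:8485;jn2:8485;jn3:8485/HAB",
  }

  assert get_nameservice(hdfs_site) == "HAB"
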
http://git-wip-us.apache.org/repos/asf/ambari/blob/a8f1279b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
index b6f3bee..7187e04 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py
@@ -26,7 +26,8 @@ from resource_management.core import shell
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.decorator import retry
 
-__all__ = ["get_namenode_states", "get_active_namenode", "get_property_for_active_namenode"]
+__all__ = ["get_namenode_states", "get_active_namenode",
+           "get_property_for_active_namenode", "get_nameservice"]
 
 HDFS_NN_STATE_ACTIVE = 'active'
 HDFS_NN_STATE_STANDBY = 'standby'
@@ -67,7 +68,7 @@ def get_namenode_states_noretries(hdfs_site, security_enabled, run_user):
   standby_namenodes = []
   unknown_namenodes = []
   
-  name_service = hdfs_site['dfs.nameservices']
+  name_service = get_nameservice(hdfs_site)
   nn_unique_ids_key = 'dfs.ha.namenodes.' + name_service
 
   # now we have something like 'nn1,nn2,nn3,nn4'
@@ -117,7 +118,7 @@ def get_namenode_states_noretries(hdfs_site, security_enabled, run_user):
   return active_namenodes, standby_namenodes, unknown_namenodes
 
 def is_ha_enabled(hdfs_site):
-  dfs_ha_nameservices = hdfs_site['dfs.nameservices']
+  dfs_ha_nameservices = get_nameservice(hdfs_site)
   
   if is_empty(dfs_ha_nameservices):
     return False
@@ -151,7 +152,7 @@ def get_property_for_active_namenode(hdfs_site, property_name, security_enabled,
   value = None
   rpc_key = None
   if is_ha_enabled(hdfs_site):
-    name_service = hdfs_site['dfs.nameservices']
+    name_service = get_nameservice(hdfs_site)
     active_namenodes = get_namenode_states(hdfs_site, security_enabled, run_user)[0]
     
     if not len(active_namenodes):
@@ -171,3 +172,31 @@ def get_property_for_active_namenode(hdfs_site, property_name, security_enabled,
       value = value.replace(INADDR_ANY, rpc_host)
 
   return value
+
+def get_nameservice(hdfs_site):
+  """
+  Multiple nameservices can be configured, for example to support seamless
+  distcp between two HA clusters. The nameservices are defined as a comma
+  separated list in hdfs_site['dfs.nameservices']. The parameter
+  hdfs_site['dfs.internal.nameservices'] was introduced in Hadoop 2.6 to denote
+  the nameservice of the current cluster (HDFS-6376).
+
+  This method uses hdfs_site['dfs.internal.nameservices'] to get the current
+  nameservice. If that parameter is not available, it splits the value of
+  hdfs_site['dfs.nameservices'] and returns the nameservice whose name matches
+  the suffix of hdfs_site['dfs.namenode.shared.edits.dir'], or the first one
+  if no entry matches.
+
+  :param hdfs_site: the hdfs-site configuration dictionary
+  :return: the nameservice of the current cluster, or None when not configured
+  """
+  name_service = hdfs_site.get('dfs.internal.nameservices', None)
+  if not name_service:
+    import re
+    name_service = hdfs_site.get('dfs.nameservices', None)
+    if name_service:
+      for ns in name_service.split(","):
+        if 'dfs.namenode.shared.edits.dir' in hdfs_site and re.match(r'.*%s$' % ns, hdfs_site['dfs.namenode.shared.edits.dir']): # better would be core_site['fs.defaultFS'] but it's not available
+          return ns
+      return name_service.split(",")[0] # default to return the first nameservice
+  return name_service
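
For reference, a minimal sketch of how the resolved nameservice is consumed elsewhere in this module, e.g. by get_namenode_states_noretries, which builds the per-nameservice keys from it; the hdfs_site values below are illustrative, not taken from a real cluster:

  from resource_management.libraries.functions.namenode_ha_utils import \
    get_nameservice

  hdfs_site = {
    "dfs.internal.nameservices": "HAA",
    "dfs.nameservices": "HAA,HAB",
    "dfs.ha.namenodes.HAA": "nn1,nn2",
    "dfs.namenode.rpc-address.HAA.nn1": "hosta1:8020",
    "dfs.namenode.rpc-address.HAA.nn2": "hosta2:8020",
  }

  name_service = get_nameservice(hdfs_site)  # -> "HAA"
  # same key construction as get_namenode_states_noretries above
  nn_unique_ids = hdfs_site['dfs.ha.namenodes.' + name_service].split(',')
  rpc_addresses = [hdfs_site['dfs.namenode.rpc-address.%s.%s' % (name_service, nn)]
                   for nn in nn_unique_ids]
  # rpc_addresses == ['hosta1:8020', 'hosta2:8020']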