You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by vb...@apache.org on 2015/01/14 17:20:32 UTC

ambari git commit: AMBARI-9125. Secondary Namenode fails when trying to create multiple SecondaryNameNode Checkpoint directories via Ambari. (vbrodetskyi)

Repository: ambari
Updated Branches:
  refs/heads/trunk 1a4d5aa6d -> b5c907032


AMBARI-9125. Secondary Namenode fails when trying to create multiple SecondaryNameNode Checkpoint directories via Ambari. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b5c90703
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b5c90703
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b5c90703

Branch: refs/heads/trunk
Commit: b5c907032321d101be123b18906f7738bafed60d
Parents: 1a4d5aa
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Jan 2 14:05:56 2015 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Jan 2 14:05:56 2015 +0200

----------------------------------------------------------------------
 .../HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py   | 13 +++++++------
 .../HDFS/2.1.0.2.0/package/scripts/params.py           |  2 +-
 .../test/python/stacks/2.0.6/HDFS/test_snamenode.py    |  7 +++++++
 .../src/test/python/stacks/2.0.6/configs/default.json  |  2 +-
 4 files changed, 16 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b5c90703/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
index c650c4d..7f0d536 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
@@ -26,12 +26,13 @@ def snamenode(action=None, format=False):
   import params
 
   if action == "configure":
-    Directory(params.fs_checkpoint_dir,
-              recursive=True,
-              recursive_permission=True,
-              mode=0755,
-              owner=params.hdfs_user,
-              group=params.user_group)
+    for fs_checkpoint_dir in params.fs_checkpoint_dirs:
+      Directory(fs_checkpoint_dir,
+                recursive=True,
+                recursive_permission=True,
+                mode=0755,
+                owner=params.hdfs_user,
+                group=params.user_group)
     File(params.exclude_file_path,
          content=Template("exclude_hosts_list.j2"),
          owner=params.hdfs_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5c90703/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
index cfef5c4..a4a0d3d 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
@@ -175,7 +175,7 @@ smoke_hdfs_user_mode = 0770
 namenode_formatted_old_mark_dir = format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted/")
 namenode_formatted_mark_dir = format("/var/lib/hdfs/namenode/formatted/")
 
-fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
+fs_checkpoint_dirs = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir'].split(',')
 
 dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
 dfs_data_dir = ",".join([re.sub(r'^\[.+\]', '', dfs_dir.strip()) for dfs_dir in dfs_data_dir.split(",")])

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5c90703/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index 678edc2..37969e5 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -237,6 +237,13 @@ class TestSNamenode(RMFTestCase):
                               recursive = True,
                               recursive_permission = True
                               )
+    self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary2',
+                              owner = 'hdfs',
+                              group = 'hadoop',
+                              mode = 0755,
+                              recursive = True,
+                              recursive_permission = True
+    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/security/limits.d',

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5c90703/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
index 9132b2f..2be60b6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
@@ -257,7 +257,7 @@
             "dfs.datanode.du.reserved": "1073741824", 
             "dfs.webhdfs.enabled": "true", 
             "dfs.namenode.handler.count": "100", 
-            "dfs.namenode.checkpoint.dir": "/hadoop/hdfs/namesecondary", 
+            "dfs.namenode.checkpoint.dir": "/hadoop/hdfs/namesecondary,/hadoop/hdfs/namesecondary2",
             "fs.permissions.umask-mode": "022", 
             "dfs.datanode.http.address": "0.0.0.0:50075",
             "dfs.datanode.ipc.address": "0.0.0.0:8010",