Posted to commits@ambari.apache.org by sw...@apache.org on 2014/11/06 18:34:06 UTC

[1/2] git commit: AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. Fix variable init. (swagle)

Repository: ambari
Updated Branches:
  refs/heads/branch-1.7.0 ceb454991 -> a7e1a3793


AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. Fix variable init. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a7e1a379
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a7e1a379
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a7e1a379

Branch: refs/heads/branch-1.7.0
Commit: a7e1a379330af4370f219e476f04a280d2ac9643
Parents: b5ed441
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Nov 6 09:29:41 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Nov 6 09:29:56 2014 -0800

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a7e1a379/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 6389f33..ddb2b0b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -137,5 +137,4 @@ if has_nagios:
 user_list = json.loads(config['hostLevelParams']['user_list'])
 group_list = json.loads(config['hostLevelParams']['group_list'])
 
-if security_enabled :
-  dn_proc_user=hdfs_user
+dn_proc_user=hdfs_user
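
A note on why this [1/2] follow-up is needed: params.py is imported as a plain module, so a name bound only inside the "if security_enabled:" block introduced by [2/2] never comes into existence on non-secure clusters, and the first later reference to it (rendering the hadoop-env template) raises a NameError. A minimal standalone sketch of the failure mode, with hypothetical literal values standing in for the real cluster configuration:

# Minimal sketch of the bug fixed above (values hypothetical; the real
# flags come from the cluster configuration, not literals).
security_enabled = False
hdfs_user = 'hdfs'

if security_enabled:
    dn_proc_user = hdfs_user    # before the fix: bound only on secure clusters

try:
    print(dn_proc_user)         # NameError whenever security_enabled is False
except NameError:
    print('dn_proc_user undefined; hadoop-env rendering would fail')

dn_proc_user = hdfs_user        # after the fix: always bound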


[2/2] git commit: AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. (swagle)

Posted by sw...@apache.org.
AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b5ed4413
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b5ed4413
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b5ed4413

Branch: refs/heads/branch-1.7.0
Commit: b5ed44138a9781acc6eea877eb71a670f4adaa90
Parents: ceb4549
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Wed Nov 5 18:58:37 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Nov 6 09:29:56 2014 -0800

----------------------------------------------------------------------
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  3 ++
 .../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 ++++++++
 .../services/HDFS/package/scripts/params.py     |  2 +
 .../services/HDFS/package/scripts/utils.py      | 12 ++++-
 .../services/HDFS/configuration/hadoop-env.xml  |  7 +++
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 57 ++++++++++++++++++++
 6 files changed, 101 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index fa3b118..6389f33 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -136,3 +136,6 @@ if has_nagios:
 
 user_list = json.loads(config['hostLevelParams']['user_list'])
 group_list = json.loads(config['hostLevelParams']['group_list'])
+
+if security_enabled :
+  dn_proc_user=hdfs_user

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index 25c1067..c192682 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,3 +81,25 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
     Package(params.lzo_packages_for_current_host)
+
+def setup_hadoop_env(replace=False):
+  import params
+
+  if params.security_enabled:
+    tc_owner = "root"
+  else:
+    tc_owner = params.hdfs_user
+  Directory(params.hadoop_conf_empty_dir,
+            recursive=True,
+            owner='root',
+            group='root'
+  )
+  Link(params.hadoop_conf_dir,
+       to=params.hadoop_conf_empty_dir,
+       not_if=format("ls {hadoop_conf_dir}")
+  )
+  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+       owner=tc_owner,
+       content=InlineTemplate(params.hadoop_env_sh_template),
+       replace=replace
+  )
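
The three resource declarations in setup_hadoop_env are declarative. As a rough plain-Python equivalent of what they do, an illustrative sketch only (the real resource_management library adds idempotence checks, ownership handling, and logging; the chown calls are omitted here):

# Illustrative sketch only; not the resource_management implementation.
import os

def setup_hadoop_env_sketch(conf_dir, conf_empty_dir, env_content, replace=False):
    # Directory(..., recursive=True, owner='root', group='root'):
    # create the empty conf directory tree.
    if not os.path.isdir(conf_empty_dir):
        os.makedirs(conf_empty_dir)
    # Link(..., not_if="ls {hadoop_conf_dir}"): create the symlink only
    # when nothing exists at conf_dir yet.
    if not os.path.exists(conf_dir):
        os.symlink(conf_empty_dir, conf_dir)
    # File(..., replace=replace): write hadoop-env.sh, but leave an
    # existing file untouched unless replace=True is passed.
    env_sh = os.path.join(conf_dir, 'hadoop-env.sh')
    if replace or not os.path.exists(env_sh):
        with open(env_sh, 'w') as f:
            f.write(env_content)

The replace flag is the crux of the patch: utils.service() below calls setup_hadoop_env(replace=True) so that a hadoop-env.sh written earlier without HADOOP_SECURE_DN_USER gets regenerated.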

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 22ce519..7e446f1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -253,3 +253,5 @@ ttnode_heapsize = "1024m"
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+dn_proc_user=hdfs_user
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index 14251cd..36fbc86 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -20,7 +20,7 @@ import os
 
 from resource_management import *
 import re
-
+import hdfs
 
 def service(action=None, name=None, user=None, create_pid_dir=False,
             create_log_dir=False):
@@ -100,7 +100,15 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
           pass  # Pid file content is invalid
         except OSError:
           pass  # Process is not running
-
+    pass
+
+    # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
+    # in secure mode.
+    if user == 'root':
+      params.dn_proc_user = 'root'
+      hdfs.setup_hadoop_env(replace=True)
+    pass
+  pass
 
   hadoop_env_exports_str = ''
   for exp in hadoop_env_exports.items():
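
Condensed, the control flow added here: dn_proc_user defaults to the hdfs user in params.py and is flipped to 'root' only when service() is asked to start the process as root (the secure-DataNode case, where jsvc drops privileges to HADOOP_SECURE_DN_USER); hadoop-env.sh is then rewritten with replace=True so the template in the next diff sees the new value. A hedged sketch, with a made-up function name for illustration:

# Function name hypothetical; mirrors the decision encoded above.
def resolve_dn_proc_user(run_as_user, hdfs_user='hdfs'):
    # params.py default: the DataNode process runs as the hdfs user.
    # utils.service() override: started as root means secure DN, so
    # hadoop-env.sh must export HADOOP_SECURE_DN_USER.
    return 'root' if run_as_user == 'root' else hdfs_user

assert resolve_dn_proc_user('hdfs') == 'hdfs'  # regular startup
assert resolve_dn_proc_user('root') == 'root'  # secure DN started as root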

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 158dfc0..d25a25b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -134,6 +134,13 @@ export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64
 
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
+
+HDFS_DN_PROC_USER={{dn_proc_user}}
+if [ "$HDFS_DN_PROC_USER" = "root" ]; then
+  export HADOOP_SECURE_DN_USER="{{hdfs_user}}"
+else
+  export HADOOP_SECURE_DN_USER=""
+fi
     </value>
   </property>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index b5b230d..7f6c43b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -19,6 +19,7 @@ limitations under the License.
 '''
 from ambari_commons import OSCheck
 import json
+import os
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 
@@ -108,6 +109,20 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+                              recursive=True,
+                              owner='root',
+                              group='root'
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+                              to='/etc/hadoop/conf.empty',
+                              not_if='ls /etc/hadoop/conf'
+    )
+    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
+                              owner='root',
+                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+                              replace=True
+    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -138,6 +153,20 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+                              recursive=True,
+                              owner='root',
+                              group='root'
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+                              to='/etc/hadoop/conf.empty',
+                              not_if='ls /etc/hadoop/conf'
+    )
+    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
+                              owner='root',
+                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+                              replace=True
+    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -195,6 +224,20 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+                              recursive=True,
+                              owner='root',
+                              group='root'
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+                              to='/etc/hadoop/conf.empty',
+                              not_if='ls /etc/hadoop/conf'
+    )
+    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
+                              owner='root',
+                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+                              replace=True
+    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -229,6 +272,20 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+                              recursive=True,
+                              owner='root',
+                              group='root'
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+                              to='/etc/hadoop/conf.empty',
+                              not_if='ls /etc/hadoop/conf'
+    )
+    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
+                              owner='root',
+                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+                              replace=True
+    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
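
For orientation, these assertions follow the stack-test pattern used throughout ambari-server's Python tests: the test drives the component script against a canned JSON config and then asserts the recorded resources in declaration order. A hedged sketch of the shape of such a test (command and config file names illustrative; see RMFTestCase under ambari-server/src/test/python for exact usage):

# Hedged sketch of the test pattern above; command/config illustrative.
from stacks.utils.RMFTestCase import *

class TestDatanodeSketch(RMFTestCase):
  def test_start_default(self):
    # Runs datanode.py against a canned config, recording every resource
    # the script declares.
    self.executeScript("2.0.6/services/HDFS/package/scripts/datanode.py",
                       classname = "DataNode",
                       command = "start",
                       config_file = "default.json")
    # assertResourceCalled consumes recorded resources in order, so the new
    # Directory/Link/File trio is checked before the pid-file cleanup.
    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
                              recursive = True,
                              owner = 'root',
                              group = 'root',
                              )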