Posted to commits@ambari.apache.org by sw...@apache.org on 2014/11/07 05:54:06 UTC

[1/4] ambari git commit: Revert "AMBARI-8174. Reverting all patches. (swagle)"

Repository: ambari
Updated Branches:
  refs/heads/branch-1.7.0 b7f10831d -> eacb28127


Revert "AMBARI-8174. Reverting all patches. (swagle)"

This reverts commit b7f10831d48de70052bc308596237775c12494a4.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f09a3caa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f09a3caa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f09a3caa

Branch: refs/heads/branch-1.7.0
Commit: f09a3caa212e772f65f79b8dbcc96b5ca330980e
Parents: b7f1083
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Nov 6 20:50:10 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Nov 6 20:50:10 2014 -0800

----------------------------------------------------------------------
 .../python/resource_management/core/source.py   |  3 +-
 .../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 +++++++
 .../services/HDFS/package/scripts/utils.py      | 18 ++----
 .../services/HDFS/configuration/hadoop-env.xml  |  7 ++-
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 64 +++++++++++++++++---
 5 files changed, 87 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f09a3caa/ambari-common/src/main/python/resource_management/core/source.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/source.py b/ambari-common/src/main/python/resource_management/core/source.py
index bff22f3..22e1c6d 100644
--- a/ambari-common/src/main/python/resource_management/core/source.py
+++ b/ambari-common/src/main/python/resource_management/core/source.py
@@ -114,8 +114,7 @@ else:
       self.context = variables.copy() if variables else {}
       if not hasattr(self, 'template_env'):
         self.template_env = JinjaEnvironment(loader=TemplateLoader(self.env),
-                                        autoescape=False, undefined=StrictUndefined,
-                                        trim_blocks=True)
+                                        autoescape=False, undefined=StrictUndefined, trim_blocks=True)
         
       self.template = self.template_env.get_template(self.name)     
     
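For reference, JinjaEnvironment here is jinja2's Environment (aliased on import). With undefined=StrictUndefined a render fails loudly on any missing variable instead of silently substituting an empty string, and trim_blocks drops the newline after block tags. A minimal standalone sketch, using jinja2's DictLoader in place of Ambari's TemplateLoader:

    from jinja2 import DictLoader, Environment, StrictUndefined

    # DictLoader stands in for Ambari's TemplateLoader, which resolves
    # template names against the stack's templates directory.
    env = Environment(loader=DictLoader({'hadoop-env.sh': 'user is {{hdfs_user}}\n'}),
                      autoescape=False, undefined=StrictUndefined, trim_blocks=True)

    print(env.get_template('hadoop-env.sh').render(hdfs_user='hdfs'))  # user is hdfs
    env.get_template('hadoop-env.sh').render()  # raises jinja2.exceptions.UndefinedError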

http://git-wip-us.apache.org/repos/asf/ambari/blob/f09a3caa/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index 25c1067..c192682 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,3 +81,25 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
     Package(params.lzo_packages_for_current_host)
+
+def setup_hadoop_env(replace=False):
+  import params
+
+  if params.security_enabled:
+    tc_owner = "root"
+  else:
+    tc_owner = params.hdfs_user
+  Directory(params.hadoop_conf_empty_dir,
+            recursive=True,
+            owner='root',
+            group='root'
+  )
+  Link(params.hadoop_conf_dir,
+       to=params.hadoop_conf_empty_dir,
+       not_if=format("ls {hadoop_conf_dir}")
+  )
+  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+       owner=tc_owner,
+       content=InlineTemplate(params.hadoop_env_sh_template),
+       replace=replace
+  )

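Directory, Link, and File above are resource_management resources: declarative wrappers that converge the host to the stated state. Their effect, approximated in plain Python (a sketch only; the real resources also handle ownership, logging, and failure reporting):

    import os

    def setup_hadoop_env_sketch(conf_dir, conf_empty_dir, env_content, replace=False):
        if not os.path.isdir(conf_empty_dir):       # Directory(..., recursive=True)
            os.makedirs(conf_empty_dir)
        if not os.path.exists(conf_dir):            # Link(..., not_if="ls {hadoop_conf_dir}")
            os.symlink(conf_empty_dir, conf_dir)
        env_sh = os.path.join(conf_dir, 'hadoop-env.sh')
        if replace or not os.path.exists(env_sh):   # File(..., replace=replace)
            with open(env_sh, 'w') as f:
                f.write(env_content)                # the rendered InlineTemplate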
http://git-wip-us.apache.org/repos/asf/ambari/blob/f09a3caa/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index 28a9ccb..c177d9c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -103,22 +103,11 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
     pass
   pass
 
-  service_is_up = check_process if action == "start" else None
-
   # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
   # in secure mode.
-  set_secure_dn_user_cmd="sed -i 's/export HADOOP_SECURE_DN_USER=.*/export " \
-                "HADOOP_SECURE_DN_USER=\"{0}\"/' {1}"
-  if name == 'datanode' and action == 'start':
-    if user == 'root':
-      secure_dn_user = params.hdfs_user
-    else:
-      secure_dn_user = ""
-    pass
-
-    Execute(set_secure_dn_user_cmd.format(secure_dn_user,
-              os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh')),
-            not_if=service_is_up)
+  if name == 'datanode' and user == 'root':
+    params.dn_proc_user = 'root'
+    hdfs.setup_hadoop_env(replace=True)
   pass
 
   hadoop_env_exports_str = ''
@@ -132,6 +121,7 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
 
   daemon_cmd = format("{ulimit_cmd} su -s /bin/bash - {user} -c '{cmd} {action} {name}'")
 
+  service_is_up = check_process if action == "start" else None
   #remove pid file from dead process
   File(pid_file,
        action="delete",

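The hunk above drops the in-place sed edit of hadoop-env.sh in favor of recording the effective DataNode process user in params and regenerating the file from its template via hdfs.setup_hadoop_env(replace=True); regeneration is authoritative, whereas sed silently does nothing when the target line is absent. The dropped sed, expressed in Python for reference (a sketch, assuming the file fits in memory):

    import re

    def set_secure_dn_user(hadoop_env_path, secure_dn_user):
        # Equivalent of:
        #   sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER="<user>"/' <file>
        with open(hadoop_env_path) as f:
            text = f.read()
        text = re.sub(r'export HADOOP_SECURE_DN_USER=.*',
                      'export HADOOP_SECURE_DN_USER="%s"' % secure_dn_user, text)
        with open(hadoop_env_path, 'w') as f:
            f.write(text)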
http://git-wip-us.apache.org/repos/asf/ambari/blob/f09a3caa/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index b768cb4..d186922 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -135,7 +135,12 @@ export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}
 
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
 
-export HADOOP_SECURE_DN_USER=""
+HDFS_DN_PROC_USER={{dn_proc_user}}
+if [ $HDFS_DN_PROC_USER == "root" ]; then
+  export HADOOP_SECURE_DN_USER="{{hdfs_user}}"
+else
+  export HADOOP_SECURE_DN_USER=""
+fi
     </value>
   </property>
   
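Rendered with dn_proc_user='root' (set by utils.py above) and hdfs_user='hdfs', the new block exports HADOOP_SECURE_DN_USER="hdfs"; for any other value it exports the empty string. A quick check with jinja2 directly (a sketch; in Ambari the values come from params.py):

    from jinja2 import Template

    snippet = (
        'HDFS_DN_PROC_USER={{dn_proc_user}}\n'
        'if [ $HDFS_DN_PROC_USER == "root" ]; then\n'
        '  export HADOOP_SECURE_DN_USER="{{hdfs_user}}"\n'
        'else\n'
        '  export HADOOP_SECURE_DN_USER=""\n'
        'fi\n')
    print(Template(snippet).render(dn_proc_user='root', hdfs_user='hdfs'))

Note that the unquoted $HDFS_DN_PROC_USER and the == operator both rely on bash's [ builtin; POSIX test spells string equality as = and needs the expansion quoted.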

http://git-wip-us.apache.org/repos/asf/ambari/blob/f09a3caa/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 40e5be8..7f6c43b 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -49,9 +49,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Execute', "sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER=\"\"/' /etc/hadoop/conf/hadoop-env.sh",
-                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -112,8 +109,19 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Execute', "sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER=\"hdfs\"/' /etc/hadoop/conf/hadoop-env.sh",
-                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+                              recursive=True,
+                              owner='root',
+                              group='root'
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+                              to='/etc/hadoop/conf.empty',
+                              not_if='ls /etc/hadoop/conf'
+    )
+    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
+                              owner='root',
+                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+                              replace=True
     )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
@@ -145,8 +153,19 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Execute', "sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER=\"hdfs\"/' /etc/hadoop/conf/hadoop-env.sh",
-                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+                              recursive=True,
+                              owner='root',
+                              group='root'
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+                              to='/etc/hadoop/conf.empty',
+                              not_if='ls /etc/hadoop/conf'
+    )
+    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
+                              owner='root',
+                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+                              replace=True
     )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
@@ -181,9 +200,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Execute', "sed -i 's/export HADOOP_SECURE_DN_USER=.*/export HADOOP_SECURE_DN_USER=\"\"/' /etc/hadoop/conf/hadoop-env.sh",
-                              not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -208,6 +224,20 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+                              recursive=True,
+                              owner='root',
+                              group='root'
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+                              to='/etc/hadoop/conf.empty',
+                              not_if='ls /etc/hadoop/conf'
+    )
+    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
+                              owner='root',
+                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+                              replace=True
+    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -242,6 +272,20 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
+                              recursive=True,
+                              owner='root',
+                              group='root'
+    )
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+                              to='/etc/hadoop/conf.empty',
+                              not_if='ls /etc/hadoop/conf'
+    )
+    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
+                              owner='root',
+                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
+                              replace=True
+    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',

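assertResourceCalled comes from Ambari's RMFTestCase harness, which runs the script with the resource classes intercepted and then lets the test check the recorded resources in order. The general pattern, reduced to unittest.mock (a hypothetical illustration, not the real harness; File and delete_stale_pid are invented for the example):

    from unittest import mock

    class File(object):                  # stand-in resource class
        def __init__(self, path, **kwargs):
            self.path, self.kwargs = path, kwargs

    def delete_stale_pid(pid_file):      # stand-in for script code declaring a File resource
        File(pid_file, action=['delete'])

    def test_delete_stale_pid():
        with mock.patch('%s.File' % __name__) as File_mock:
            delete_stale_pid('/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid')
            File_mock.assert_called_once_with(
                '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action=['delete'])

    test_delete_stale_pid()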

[4/4] ambari git commit: Revert "AMBARI-8174. Reverting all patches. (swagle)"

Posted by sw...@apache.org.
Revert "AMBARI-8174. Reverting all patches. (swagle)"

This reverts commit b5ed44138a9781acc6eea877eb71a670f4adaa90.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eacb2812
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eacb2812
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eacb2812

Branch: refs/heads/branch-1.7.0
Commit: eacb28127744e320417438b8ac8c1ef4388b69d8
Parents: 44dce92
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Nov 6 20:52:39 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Nov 6 20:52:39 2014 -0800

----------------------------------------------------------------------
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  3 --
 .../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 --------
 .../services/HDFS/package/scripts/params.py     |  2 -
 .../services/HDFS/package/scripts/utils.py      | 12 +----
 .../services/HDFS/configuration/hadoop-env.xml  |  7 ---
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 57 --------------------
 6 files changed, 2 insertions(+), 101 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 6389f33..fa3b118 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -136,6 +136,3 @@ if has_nagios:
 
 user_list = json.loads(config['hostLevelParams']['user_list'])
 group_list = json.loads(config['hostLevelParams']['group_list'])
-
-if security_enabled :
-  dn_proc_user=hdfs_user

http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index c192682..25c1067 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,25 +81,3 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
     Package(params.lzo_packages_for_current_host)
-
-def setup_hadoop_env(replace=False):
-  import params
-
-  if params.security_enabled:
-    tc_owner = "root"
-  else:
-    tc_owner = params.hdfs_user
-  Directory(params.hadoop_conf_empty_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
-  Link(params.hadoop_conf_dir,
-       to=params.hadoop_conf_empty_dir,
-       not_if=format("ls {hadoop_conf_dir}")
-  )
-  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-       owner=tc_owner,
-       content=InlineTemplate(params.hadoop_env_sh_template),
-       replace=replace
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 7e446f1..22ce519 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -253,5 +253,3 @@ ttnode_heapsize = "1024m"
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
-
-dn_proc_user=hdfs_user
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index 36fbc86..14251cd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -20,7 +20,7 @@ import os
 
 from resource_management import *
 import re
-import hdfs
+
 
 def service(action=None, name=None, user=None, create_pid_dir=False,
             create_log_dir=False):
@@ -100,15 +100,7 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
           pass  # Pid file content is invalid
         except OSError:
           pass  # Process is not running
-    pass
-
-    # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
-    # in secure mode.
-    if user == 'root':
-      params.dn_proc_user = 'root'
-      hdfs.setup_hadoop_env(replace=True)
-    pass
-  pass
+
 
   hadoop_env_exports_str = ''
   for exp in hadoop_env_exports.items():

http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 9b43c20..4c60fb2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -134,13 +134,6 @@ export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
 export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}
 
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
-
-HDFS_DN_PROC_USER={{dn_proc_user}}
-if [ $HDFS_DN_PROC_USER="root" ]; then
-  export HADOOP_SECURE_DN_USER="{{hdfs_user}}"
-else
-  export HADOOP_SECURE_DN_USER=""
-fi
     </value>
   </property>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 7f6c43b..b5b230d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -19,7 +19,6 @@ limitations under the License.
 '''
 from ambari_commons import OSCheck
 import json
-import os
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 
@@ -109,20 +108,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -153,20 +138,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -224,20 +195,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -272,20 +229,6 @@ class TestDatanode(RMFTestCase):
                               owner = 'hdfs',
                               recursive = True,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              recursive=True,
-                              owner='root',
-                              group='root'
-    )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              to='/etc/hadoop/conf.empty',
-                              not_if='ls /etc/hadoop/conf'
-    )
-    self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
-                              owner='root',
-                              content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
-                              replace=True
-    )
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
                               action = ['delete'],
                               not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',


[2/4] ambari git commit: Revert "AMBARI-8174. Reverting all patches. (swagle)"

Posted by sw...@apache.org.
Revert "AMBARI-8174. Reverting all patches. (swagle)"

This reverts commit c75578a41e80def8841dbebc566a80474f89a8fb.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4e2201c0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4e2201c0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4e2201c0

Branch: refs/heads/branch-1.7.0
Commit: 4e2201c0da112219d21f6e1f4be106ecc4290ce3
Parents: f09a3ca
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Nov 6 20:50:55 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Nov 6 20:50:55 2014 -0800

----------------------------------------------------------------------
 .../HDP/2.0.6/services/HDFS/package/scripts/utils.py    | 12 ++++++------
 .../HDP/2.2/services/HDFS/configuration/hadoop-env.xml  |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4e2201c0/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index c177d9c..36fbc86 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -101,13 +101,13 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
         except OSError:
           pass  # Process is not running
     pass
-  pass
 
-  # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
-  # in secure mode.
-  if name == 'datanode' and user == 'root':
-    params.dn_proc_user = 'root'
-    hdfs.setup_hadoop_env(replace=True)
+    # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
+    # in secure mode.
+    if user == 'root':
+      params.dn_proc_user = 'root'
+      hdfs.setup_hadoop_env(replace=True)
+    pass
   pass
 
   hadoop_env_exports_str = ''

http://git-wip-us.apache.org/repos/asf/ambari/blob/4e2201c0/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index d186922..9b43c20 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -136,7 +136,7 @@ export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}
 export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
 
 HDFS_DN_PROC_USER={{dn_proc_user}}
-if [ $HDFS_DN_PROC_USER == "root" ]; then
+if [ $HDFS_DN_PROC_USER="root" ]; then
   export HADOOP_SECURE_DN_USER="{{hdfs_user}}"
 else
   export HADOOP_SECURE_DN_USER=""

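Note what this hunk reintroduces: with the spaces around the operator gone, [ $HDFS_DN_PROC_USER="root" ] is a single-argument test, true for any non-empty expansion, so the root branch is always taken. Driving bash from Python makes the difference visible (POSIX test spells string equality as = with spaces around it, and the expansion should be quoted):

    import subprocess

    for user in ('root', 'hdfs'):
        # No spaces: the test sees one non-empty word, so it is always true.
        buggy = subprocess.call(['bash', '-c', '[ %s="root" ]' % user])
        # Spaces and quotes: a real string comparison.
        fixed = subprocess.call(['bash', '-c', '[ "%s" = "root" ]' % user])
        print('%s -> buggy:%s fixed:%s' % (user, buggy == 0, fixed == 0))
    # root -> buggy:True fixed:True
    # hdfs -> buggy:True fixed:False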

[3/4] ambari git commit: Revert "AMBARI-8174. Reverting all patches. (swagle)"

Posted by sw...@apache.org.
Revert "AMBARI-8174. Reverting all patches. (swagle)"

This reverts commit a7e1a379330af4370f219e476f04a280d2ac9643.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/44dce929
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/44dce929
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/44dce929

Branch: refs/heads/branch-1.7.0
Commit: 44dce929aad6b71f1eed18c0b08c52bbf4b6a1d6
Parents: 4e2201c
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Nov 6 20:51:49 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Nov 6 20:51:49 2014 -0800

----------------------------------------------------------------------
 .../resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/44dce929/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index ddb2b0b..6389f33 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -137,4 +137,5 @@ if has_nagios:
 user_list = json.loads(config['hostLevelParams']['user_list'])
 group_list = json.loads(config['hostLevelParams']['group_list'])
 
-dn_proc_user=hdfs_user
+if security_enabled :
+  dn_proc_user=hdfs_user