Posted to commits@ambari.apache.org by sw...@apache.org on 2014/11/07 05:54:09 UTC
[4/4] ambari git commit: Revert "AMBARI-8174. Reverting all patches. (swagle)"
Revert "AMBARI-8174. Reverting all patches. (swagle)"
This reverts commit b5ed44138a9781acc6eea877eb71a670f4adaa90.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eacb2812
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eacb2812
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eacb2812
Branch: refs/heads/branch-1.7.0
Commit: eacb28127744e320417438b8ac8c1ef4388b69d8
Parents: 44dce92
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Thu Nov 6 20:52:39 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Thu Nov 6 20:52:39 2014 -0800
----------------------------------------------------------------------
.../2.0.6/hooks/before-ANY/scripts/params.py | 3 --
.../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 --------
.../services/HDFS/package/scripts/params.py | 2 -
.../services/HDFS/package/scripts/utils.py | 12 +----
.../services/HDFS/configuration/hadoop-env.xml | 7 ---
.../python/stacks/2.0.6/HDFS/test_datanode.py | 57 --------------------
6 files changed, 2 insertions(+), 101 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 6389f33..fa3b118 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -136,6 +136,3 @@ if has_nagios:
user_list = json.loads(config['hostLevelParams']['user_list'])
group_list = json.loads(config['hostLevelParams']['group_list'])
-
-if security_enabled :
- dn_proc_user=hdfs_user
http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index c192682..25c1067 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,25 +81,3 @@ def hdfs(name=None):
if params.lzo_enabled:
Package(params.lzo_packages_for_current_host)
-
-def setup_hadoop_env(replace=False):
- import params
-
- if params.security_enabled:
- tc_owner = "root"
- else:
- tc_owner = params.hdfs_user
- Directory(params.hadoop_conf_empty_dir,
- recursive=True,
- owner='root',
- group='root'
- )
- Link(params.hadoop_conf_dir,
- to=params.hadoop_conf_empty_dir,
- not_if=format("ls {hadoop_conf_dir}")
- )
- File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
- owner=tc_owner,
- content=InlineTemplate(params.hadoop_env_sh_template),
- replace=replace
- )
http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 7e446f1..22ce519 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -253,5 +253,3 @@ ttnode_heapsize = "1024m"
dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
-
-dn_proc_user=hdfs_user
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index 36fbc86..14251cd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -20,7 +20,7 @@ import os
from resource_management import *
import re
-import hdfs
+
def service(action=None, name=None, user=None, create_pid_dir=False,
create_log_dir=False):
@@ -100,15 +100,7 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
pass # Pid file content is invalid
except OSError:
pass # Process is not running
- pass
-
- # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
- # in secure mode.
- if user == 'root':
- params.dn_proc_user = 'root'
- hdfs.setup_hadoop_env(replace=True)
- pass
- pass
+
hadoop_env_exports_str = ''
for exp in hadoop_env_exports.items():
http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
index 9b43c20..4c60fb2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/configuration/hadoop-env.xml
@@ -134,13 +134,6 @@ export HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}
export JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}
export HADOOP_OPTS="-Dhdp.version=$HDP_VERSION $HADOOP_OPTS"
-
-HDFS_DN_PROC_USER={{dn_proc_user}}
-if [ $HDFS_DN_PROC_USER="root" ]; then
- export HADOOP_SECURE_DN_USER="{{hdfs_user}}"
-else
- export HADOOP_SECURE_DN_USER=""
-fi
</value>
</property>
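Side note for anyone re-applying the hadoop-env.sh fragment removed above: the test written as if [ $HDFS_DN_PROC_USER="root" ] expands to a single non-empty word (for example hdfs=root), so the shell treats it as true for every value and HADOOP_SECURE_DN_USER is always exported. A minimal corrected sketch is shown below; it is illustrative only, not part of this commit, and assumes the same dn_proc_user and hdfs_user template variables.

# Export the secure DataNode user only when the process user is root,
# i.e. when the DataNode is started through jsvc in secure mode.
HDFS_DN_PROC_USER={{dn_proc_user}}
if [ "$HDFS_DN_PROC_USER" = "root" ]; then
  export HADOOP_SECURE_DN_USER="{{hdfs_user}}"
else
  export HADOOP_SECURE_DN_USER=""
fi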
http://git-wip-us.apache.org/repos/asf/ambari/blob/eacb2812/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 7f6c43b..b5b230d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -19,7 +19,6 @@ limitations under the License.
'''
from ambari_commons import OSCheck
import json
-import os
from mock.mock import MagicMock, patch
from stacks.utils.RMFTestCase import *
@@ -109,20 +108,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
- recursive=True,
- owner='root',
- group='root'
- )
- self.assertResourceCalled('Link', '/etc/hadoop/conf',
- to='/etc/hadoop/conf.empty',
- not_if='ls /etc/hadoop/conf'
- )
- self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
- owner='root',
- content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
- replace=True
- )
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -153,20 +138,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
- recursive=True,
- owner='root',
- group='root'
- )
- self.assertResourceCalled('Link', '/etc/hadoop/conf',
- to='/etc/hadoop/conf.empty',
- not_if='ls /etc/hadoop/conf'
- )
- self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
- owner='root',
- content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
- replace=True
- )
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -224,20 +195,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
- recursive=True,
- owner='root',
- group='root'
- )
- self.assertResourceCalled('Link', '/etc/hadoop/conf',
- to='/etc/hadoop/conf.empty',
- not_if='ls /etc/hadoop/conf'
- )
- self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
- owner='root',
- content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
- replace=True
- )
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
@@ -272,20 +229,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
- recursive=True,
- owner='root',
- group='root'
- )
- self.assertResourceCalled('Link', '/etc/hadoop/conf',
- to='/etc/hadoop/conf.empty',
- not_if='ls /etc/hadoop/conf'
- )
- self.assertResourceCalled('File', os.path.join('/etc/hadoop/conf', 'hadoop-env.sh'),
- owner='root',
- content=InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
- replace=True
- )
self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid',
action = ['delete'],
not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',