Posted to commits@ambari.apache.org by ao...@apache.org on 2014/02/25 18:11:08 UTC
git commit: AMBARI-4823. Start Services failed on Save and Apply Configuration step of Enable Security Wizard (aonishuk)
Repository: ambari
Updated Branches:
refs/heads/trunk 6f0f26a56 -> f18a824c3
AMBARI-4823. Start Services failed on Save and Apply Configuration step of Enable Security Wizard (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f18a824c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f18a824c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f18a824c
Branch: refs/heads/trunk
Commit: f18a824c3cbae96dbcdb0fcdd22ed8c4aba0680c
Parents: 6f0f26a
Author: Andrew Onischuk <ao...@hortonworks.com>
Authored: Tue Feb 25 09:09:26 2014 -0800
Committer: Andrew Onischuk <ao...@hortonworks.com>
Committed: Tue Feb 25 09:10:48 2014 -0800
----------------------------------------------------------------------
.../1.3.2/hooks/before-START/scripts/params.py | 13 ----
.../HDFS/package/scripts/hdfs_datanode.py | 17 +---
.../HDFS/package/scripts/hdfs_namenode.py | 9 +--
.../HDFS/package/scripts/hdfs_snamenode.py | 16 +---
.../services/HDFS/package/scripts/params.py | 13 ----
.../services/HDFS/package/scripts/utils.py | 76 +-----------------
.../ZOOKEEPER/package/scripts/params.py | 2 +-
.../2.0.6/hooks/before-START/scripts/params.py | 13 ----
.../HDFS/package/scripts/hdfs_datanode.py | 18 +----
.../HDFS/package/scripts/hdfs_namenode.py | 9 +--
.../HDFS/package/scripts/hdfs_snamenode.py | 16 +---
.../HDFS/package/scripts/journalnode.py | 8 +-
.../services/HDFS/package/scripts/params.py | 13 ----
.../services/HDFS/package/scripts/utils.py | 81 +-------------------
.../ZOOKEEPER/package/scripts/params.py | 2 +-
.../python/stacks/1.3.2/HDFS/test_datanode.py | 8 --
.../python/stacks/1.3.2/HDFS/test_namenode.py | 8 --
.../python/stacks/1.3.2/HDFS/test_snamenode.py | 8 --
.../python/stacks/2.0.6/HDFS/test_datanode.py | 8 --
.../stacks/2.0.6/HDFS/test_journalnode.py | 8 --
.../python/stacks/2.0.6/HDFS/test_namenode.py | 12 ---
.../python/stacks/2.0.6/HDFS/test_snamenode.py | 8 --
.../test/python/stacks/2.0.6/HDFS/test_zkfc.py | 8 --
23 files changed, 26 insertions(+), 348 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
index 4ad49dd..61ad367 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-START/scripts/params.py
@@ -32,19 +32,6 @@ jdk_location = config['hostLevelParams']['jdk_location']
#security params
_authentication = config['configurations']['core-site']['hadoop.security.authentication']
security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
-dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_secondary_namenode_keytab_file = config['configurations']['hdfs-site']['fs.secondary.namenode.keytab.file']
-dfs_datanode_keytab_file = config['configurations']['hdfs-site']['dfs.datanode.keytab.file']
-dfs_namenode_keytab_file = config['configurations']['hdfs-site']['dfs.namenode.keytab.file']
-
-dfs_datanode_kerberos_principal = config['configurations']['hdfs-site']['dfs.datanode.kerberos.principal']
-dfs_journalnode_kerberos_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.principal']
-dfs_secondary_namenode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.internal.spnego.principal']
-dfs_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.namenode.kerberos.principal']
-dfs_web_authentication_kerberos_principal = config['configurations']['hdfs-site']['dfs.web.authentication.kerberos.principal']
-dfs_secondary_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.principal']
-dfs_journalnode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.internal.spnego.principal']
#users and groups
mapred_user = config['configurations']['global']['mapred_user']
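
Note on the removed security params: every keytab and principal above was read from hdfs-site with an unguarded dict lookup. On a cluster where security has not been enabled yet those properties do not exist, so importing params.py raises a KeyError before any service command can run -- a plausible source of the "Start Services failed" symptom this commit fixes. (The removed block also carried a copy-paste slip: dfs_web_authentication_kerberos_keytab was read from dfs.journalnode.keytab.file.) A minimal sketch of the difference, in plain Python rather than Ambari's config API:

# Plain-Python sketch (not Ambari's API): guarded vs. unguarded lookup
# when the Kerberos properties are absent from hdfs-site.
config = {'configurations': {'hdfs-site': {}}}  # security not yet enabled
hdfs_site = config['configurations']['hdfs-site']

# Removed style -- raises KeyError at module import time:
#   dfs_namenode_keytab_file = hdfs_site['dfs.namenode.keytab.file']

# Guarded style -- degrades to None instead of failing:
dfs_namenode_keytab_file = hdfs_site.get('dfs.namenode.keytab.file')
print(dfs_namenode_keytab_file)  # -> None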
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_datanode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_datanode.py
index 7ec3135..50ef5e5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_datanode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_datanode.py
@@ -40,21 +40,10 @@ def datanode(action=None):
owner=params.hdfs_user,
group=params.user_group)
- if action == "start":
+ elif action == "start" or action == "stop":
service(
action=action, name="datanode",
user=params.hdfs_user,
create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_datanode_keytab_file,
- principal=params.dfs_datanode_kerberos_principal
- )
- if action == "stop":
- service(
- action=action, name="datanode",
- user=params.hdfs_user,
- create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_datanode_keytab_file,
- principal=params.dfs_datanode_kerberos_principal
- )
+ create_log_dir=True
+ )
\ No newline at end of file
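
With the patch, the duplicated start/stop branches in datanode() collapse into one, and the keytab/principal arguments disappear because service() no longer runs kinit itself. A runnable sketch of the resulting control flow (params and service() are stubbed here; the real ones come from the Ambari resource_management package):

class params:                       # stub for the script's 'import params'
    hdfs_user = "hdfs"

def service(**kwargs):              # stub for utils.service
    print("service(%s)" % sorted(kwargs.items()))

def datanode(action=None):
    if action == "configure":
        pass                        # directory/config setup elided
    elif action == "start" or action == "stop":
        # one call now covers both actions; no keytab/principal passed
        service(action=action, name="datanode",
                user=params.hdfs_user,
                create_pid_dir=True,
                create_log_dir=True)

datanode("start")                   # prints the stubbed service() call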
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
index 9d193cf..0397817 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
@@ -33,10 +33,8 @@ def namenode(action=None, do_format=True):
pass
service(
action="start", name="namenode", user=params.hdfs_user,
- keytab=params.dfs_namenode_keytab_file,
create_pid_dir=True,
- create_log_dir=True,
- principal=params.dfs_namenode_kerberos_principal
+ create_log_dir=True
)
namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
@@ -51,9 +49,8 @@ def namenode(action=None, do_format=True):
if action == "stop":
service(
- action="stop", name="namenode", user=params.hdfs_user,
- keytab=params.dfs_namenode_keytab_file,
- principal=params.dfs_namenode_kerberos_principal
+ action="stop", name="namenode",
+ user=params.hdfs_user,
)
if action == "decommission":
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_snamenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_snamenode.py
index a943455..dcd7ac4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_snamenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_snamenode.py
@@ -31,23 +31,11 @@ def snamenode(action=None, format=False):
mode=0755,
owner=params.hdfs_user,
group=params.user_group)
- elif action == "start":
+ elif action == "start" or action == "stop":
service(
action=action,
name="secondarynamenode",
user=params.hdfs_user,
create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_secondary_namenode_keytab_file,
- principal=params.dfs_secondary_namenode_kerberos_principal
- )
- elif action == "stop":
- service(
- action=action,
- name="secondarynamenode",
- user=params.hdfs_user,
- create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_secondary_namenode_keytab_file,
- principal=params.dfs_secondary_namenode_kerberos_principal
+ create_log_dir=True
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
index 7824744..98d536c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
@@ -31,22 +31,9 @@ else:
#security params
_authentication = config['configurations']['core-site']['hadoop.security.authentication']
security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
-dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_secondary_namenode_keytab_file = config['configurations']['hdfs-site']['dfs.secondary.namenode.keytab.file']
-dfs_datanode_keytab_file = config['configurations']['hdfs-site']['dfs.datanode.keytab.file']
-dfs_namenode_keytab_file = config['configurations']['hdfs-site']['dfs.namenode.keytab.file']
smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
-dfs_datanode_kerberos_principal = config['configurations']['hdfs-site']['dfs.datanode.kerberos.principal']
-dfs_journalnode_kerberos_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.principal']
-dfs_secondary_namenode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.internal.spnego.principal']
-dfs_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.namenode.kerberos.principal']
-dfs_web_authentication_kerberos_principal = config['configurations']['hdfs-site']['dfs.web.authentication.kerberos.principal']
-dfs_secondary_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.principal']
-dfs_journalnode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.internal.spnego.principal']
-
#exclude file
hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/utils.py
index a67d3b2..cf15f1e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/utils.py
@@ -21,10 +21,9 @@ from resource_management import *
def service(action=None, name=None, user=None, create_pid_dir=False,
- create_log_dir=False, keytab=None, principal=None):
+ create_log_dir=False):
import params
- kinit_cmd = "true"
pid_dir = format("{hadoop_pid_dir_prefix}/{user}")
pid_file = format("{pid_dir}/hadoop-{user}-{name}.pid")
log_dir = format("{hdfs_log_dir_prefix}/{user}")
@@ -42,11 +41,7 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
owner=user,
recursive=True)
- if params.security_enabled:
- principal_replaced = principal.replace("_HOST", params.hostname)
- kinit_cmd = format("kinit -kt {keytab} {principal_replaced}")
-
- if name == "datanode":
+ if params.security_enabled and name == "datanode":
user = "root"
pid_file = format(
"{hadoop_pid_dir_prefix}/{hdfs_user}/hadoop-{hdfs_user}-{name}.pid")
@@ -57,7 +52,6 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
"ls {pid_file} >/dev/null 2>&1 &&"
" ps `cat {pid_file}` >/dev/null 2>&1") if action == "start" else None
- Execute(kinit_cmd)
Execute(daemon_cmd,
user = user,
not_if=service_is_up
@@ -66,68 +60,4 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
File(pid_file,
action="delete",
ignore_failures=True
- )
-
-
-def hdfs_directory(name=None, owner=None, group=None,
- mode=None, recursive_chown=False, recursive_chmod=False):
- import params
-
- dir_exists = format("hadoop fs -ls {name} >/dev/null 2>&1")
- namenode_safe_mode_off = "hadoop dfsadmin -safemode get|grep 'Safe mode is OFF'"
-
- stub_dir = params.namenode_dirs_created_stub_dir
- stub_filename = params.namenode_dirs_stub_filename
- dir_absent_in_stub = format(
- "grep -q '^{name}$' {stub_dir}/{stub_filename} > /dev/null 2>&1; test $? -ne 0")
- record_dir_in_stub = format("echo '{name}' >> {stub_dir}/{stub_filename}")
- tries = 30
- try_sleep = 10
- dfs_check_nn_status_cmd = "true"
-
- #if params.stack_version[0] == "2":
- #mkdir_cmd = format("fs -mkdir -p {name}")
- #else:
- mkdir_cmd = format("fs -mkdir {name}")
-
- if params.security_enabled:
- Execute(format("kinit -kt {hdfs_user_keytab} {hdfs_user}"),
- user = params.hdfs_user)
- ExecuteHadoop(mkdir_cmd,
- try_sleep=try_sleep,
- tries=tries,
- not_if=format(
- "{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
- "{dir_exists} && ! {namenode_safe_mode_off}"),
- only_if=format(
- "su - hdfs -c '{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
- "! {dir_exists}'"),
- conf_dir=params.hadoop_conf_dir,
- user=params.hdfs_user
- )
- Execute(record_dir_in_stub,
- user=params.hdfs_user,
- only_if=format("! {dir_absent_in_stub}")
- )
-
- recursive = "-R" if recursive_chown else ""
- perm_cmds = []
-
- if owner:
- chown = owner
- if group:
- chown = format("{owner}:{group}")
- perm_cmds.append(format("fs -chown {recursive} {chown} {name}"))
- if mode:
- perm_cmds.append(format("fs -chmod {recursive} {mode} {name}"))
- for cmd in perm_cmds:
- ExecuteHadoop(cmd,
- user=params.hdfs_user,
- only_if=format("su - hdfs -c '{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && {namenode_safe_mode_off} && {dir_exists}'"),
- try_sleep=try_sleep,
- tries=tries,
- conf_dir=params.hadoop_conf_dir
- )
-
-
-
+ )
\ No newline at end of file
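
After this hunk the 1.3.2 service() helper has the same slim signature as the 2.0.6 one below: no keytab/principal parameters, no Execute(kinit_cmd) before the daemon command, and the only security-aware branch left is running the DataNode as root so it can bind its privileged ports on a Kerberized cluster. A self-contained sketch of that surviving branch:

security_enabled = True             # stand-in for params.security_enabled

def effective_user(name, user):
    # mirrors the surviving branch in service():
    #   if params.security_enabled and name == "datanode": user = "root"
    if security_enabled and name == "datanode":
        return "root"
    return user

print(effective_user("datanode", "hdfs"))  # -> root
print(effective_user("namenode", "hdfs"))  # -> hdfs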
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py
index c6a560e..37c5a85 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/ZOOKEEPER/package/scripts/params.py
@@ -53,7 +53,7 @@ zoo_cfg_properties_map_length = len(zoo_cfg_properties_map)
zk_primary_name = "zookeeper"
zk_principal_name = "zookeeper/_HOST@EXAMPLE.COM"
-zk_principal = zk_principal_name.replace('_HOST',hostname)
+zk_principal = zk_principal_name.replace('_HOST',hostname.lower())
java64_home = config['hostLevelParams']['java_home']
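
The small-looking ZooKeeper change matters for Kerberos: keytab entries are normally created with an all-lowercase host component, while the hostname an agent reports can be mixed case, so substituting the raw value into _HOST can yield a principal that matches no keytab entry. Lowercasing first keeps the two in agreement:

zk_principal_name = "zookeeper/_HOST@EXAMPLE.COM"
hostname = "C6401.Ambari.Apache.Org"   # mixed case, as an OS may report it

# Before the patch: zookeeper/C6401.Ambari.Apache.Org@EXAMPLE.COM
# After the patch:
zk_principal = zk_principal_name.replace('_HOST', hostname.lower())
print(zk_principal)                    # zookeeper/c6401.ambari.apache.org@EXAMPLE.COM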
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 185cca7..38494da 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -32,19 +32,6 @@ jdk_location = config['hostLevelParams']['jdk_location']
#security params
_authentication = config['configurations']['core-site']['hadoop.security.authentication']
security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
-dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_secondary_namenode_keytab_file = config['configurations']['hdfs-site']['fs.secondary.namenode.keytab.file']
-dfs_datanode_keytab_file = config['configurations']['hdfs-site']['dfs.datanode.keytab.file']
-dfs_namenode_keytab_file = config['configurations']['hdfs-site']['dfs.namenode.keytab.file']
-
-dfs_datanode_kerberos_principal = config['configurations']['hdfs-site']['dfs.datanode.kerberos.principal']
-dfs_journalnode_kerberos_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.principal']
-dfs_secondary_namenode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.internal.spnego.principal']
-dfs_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.namenode.kerberos.principal']
-dfs_web_authentication_kerberos_principal = config['configurations']['hdfs-site']['dfs.web.authentication.kerberos.principal']
-dfs_secondary_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.principal']
-dfs_journalnode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.internal.spnego.principal']
#users and groups
mapred_user = config['configurations']['global']['mapred_user']
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_datanode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_datanode.py
index 023b653..5ad9eeb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_datanode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_datanode.py
@@ -20,7 +20,6 @@ limitations under the License.
from resource_management import *
from utils import service
-
def datanode(action=None):
import params
@@ -37,21 +36,10 @@ def datanode(action=None):
owner=params.hdfs_user,
group=params.user_group)
- if action == "start":
- service(
- action=action, name="datanode",
- user=params.hdfs_user,
- create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_datanode_keytab_file,
- principal=params.dfs_datanode_kerberos_principal
- )
- if action == "stop":
+ elif action == "start" or action == "stop":
service(
action=action, name="datanode",
user=params.hdfs_user,
create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_datanode_keytab_file,
- principal=params.dfs_datanode_kerberos_principal
- )
+ create_log_dir=True
+ )
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
index 0dad995..86a7913 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
@@ -41,10 +41,8 @@ def namenode(action=None, do_format=True):
service(
action="start", name="namenode", user=params.hdfs_user,
- keytab=params.dfs_namenode_keytab_file,
create_pid_dir=True,
- create_log_dir=True,
- principal=params.dfs_namenode_kerberos_principal
+ create_log_dir=True
)
if params.dfs_ha_enabled:
dfs_check_nn_status_cmd = format("su - {hdfs_user} -c 'hdfs haadmin -getServiceState {namenode_id} | grep active > /dev/null'")
@@ -64,9 +62,8 @@ def namenode(action=None, do_format=True):
create_hdfs_directories(dfs_check_nn_status_cmd)
if action == "stop":
service(
- action="stop", name="namenode", user=params.hdfs_user,
- keytab=params.dfs_namenode_keytab_file,
- principal=params.dfs_namenode_kerberos_principal
+ action="stop", name="namenode",
+ user=params.hdfs_user
)
if action == "decommission":
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_snamenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_snamenode.py
index a943455..dcd7ac4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_snamenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_snamenode.py
@@ -31,23 +31,11 @@ def snamenode(action=None, format=False):
mode=0755,
owner=params.hdfs_user,
group=params.user_group)
- elif action == "start":
+ elif action == "start" or action == "stop":
service(
action=action,
name="secondarynamenode",
user=params.hdfs_user,
create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_secondary_namenode_keytab_file,
- principal=params.dfs_secondary_namenode_kerberos_principal
- )
- elif action == "stop":
- service(
- action=action,
- name="secondarynamenode",
- user=params.hdfs_user,
- create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_secondary_namenode_keytab_file,
- principal=params.dfs_secondary_namenode_kerberos_principal
+ create_log_dir=True
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
index f2134d5..a707e2e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/journalnode.py
@@ -36,9 +36,7 @@ class JournalNode(Script):
service(
action="start", name="journalnode", user=params.hdfs_user,
create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_journalnode_keytab_file,
- principal=params.dfs_journalnode_kerberos_principal
+ create_log_dir=True
)
def stop(self, env):
@@ -48,9 +46,7 @@ class JournalNode(Script):
service(
action="stop", name="journalnode", user=params.hdfs_user,
create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_journalnode_keytab_file,
- principal=params.dfs_journalnode_kerberos_principal
+ create_log_dir=True
)
def configure(self, env):
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index f5f7a1f..d55488d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -31,23 +31,10 @@ else:
#security params
_authentication = config['configurations']['core-site']['hadoop.security.authentication']
security_enabled = ( not is_empty(_authentication) and _authentication == 'kerberos')
-dfs_journalnode_keytab_file = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_web_authentication_kerberos_keytab = config['configurations']['hdfs-site']['dfs.journalnode.keytab.file']
-dfs_secondary_namenode_keytab_file = config['configurations']['hdfs-site']['dfs.secondary.namenode.keytab.file']
-dfs_datanode_keytab_file = config['configurations']['hdfs-site']['dfs.datanode.keytab.file']
-dfs_namenode_keytab_file = config['configurations']['hdfs-site']['dfs.namenode.keytab.file']
smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
falcon_user = config['configurations']['global']['falcon_user']
-dfs_datanode_kerberos_principal = config['configurations']['hdfs-site']['dfs.datanode.kerberos.principal']
-dfs_journalnode_kerberos_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.principal']
-dfs_secondary_namenode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.internal.spnego.principal']
-dfs_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.namenode.kerberos.principal']
-dfs_web_authentication_kerberos_principal = config['configurations']['hdfs-site']['dfs.web.authentication.kerberos.principal']
-dfs_secondary_namenode_kerberos_principal = config['configurations']['hdfs-site']['dfs.secondary.namenode.kerberos.principal']
-dfs_journalnode_kerberos_internal_spnego_principal = config['configurations']['hdfs-site']['dfs.journalnode.kerberos.internal.spnego.principal']
-
#exclude file
hdfs_exclude_file = default("/clusterHostInfo/decom_dn_hosts", [])
exclude_file_path = config['configurations']['hdfs-site']['dfs.hosts.exclude']
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index aafdeee..cf15f1e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -21,10 +21,9 @@ from resource_management import *
def service(action=None, name=None, user=None, create_pid_dir=False,
- create_log_dir=False, keytab=None, principal=None):
+ create_log_dir=False):
import params
- kinit_cmd = "true"
pid_dir = format("{hadoop_pid_dir_prefix}/{user}")
pid_file = format("{pid_dir}/hadoop-{user}-{name}.pid")
log_dir = format("{hdfs_log_dir_prefix}/{user}")
@@ -42,11 +41,7 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
owner=user,
recursive=True)
- if params.security_enabled and name != "zkfc":
- principal_replaced = principal.replace("_HOST", params.hostname)
- kinit_cmd = format("kinit -kt {keytab} {principal_replaced}")
-
- if name == "datanode":
+ if params.security_enabled and name == "datanode":
user = "root"
pid_file = format(
"{hadoop_pid_dir_prefix}/{hdfs_user}/hadoop-{hdfs_user}-{name}.pid")
@@ -57,7 +52,6 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
"ls {pid_file} >/dev/null 2>&1 &&"
" ps `cat {pid_file}` >/dev/null 2>&1") if action == "start" else None
- Execute(kinit_cmd)
Execute(daemon_cmd,
user = user,
not_if=service_is_up
@@ -66,73 +60,4 @@ def service(action=None, name=None, user=None, create_pid_dir=False,
File(pid_file,
action="delete",
ignore_failures=True
- )
-
-
-def hdfs_directory(name=None, owner=None, group=None,
- mode=None, recursive_chown=False, recursive_chmod=False):
- import params
-
- dir_exists = format("hadoop fs -ls {name} >/dev/null 2>&1")
- namenode_safe_mode_off = "hadoop dfsadmin -safemode get|grep 'Safe mode is OFF'"
-
- stub_dir = params.namenode_dirs_created_stub_dir
- stub_filename = params.namenode_dirs_stub_filename
- dir_absent_in_stub = format(
- "grep -q '^{name}$' {stub_dir}/{stub_filename} > /dev/null 2>&1; test $? -ne 0")
- record_dir_in_stub = format("echo '{name}' >> {stub_dir}/{stub_filename}")
- tries = 30
- try_sleep = 10
- dfs_check_nn_status_cmd = "true"
-
- if params.dfs_ha_enabled:
- namenode_id = params.namenode_id
- dfs_check_nn_status_cmd = format(
- "hdfs haadmin -getServiceState $namenode_id | grep active > /dev/null")
-
- #if params.stack_version[0] == "2":
- mkdir_cmd = format("fs -mkdir -p {name}")
- #else:
- # mkdir_cmd = format("fs -mkdir {name}")
-
- if params.security_enabled:
- Execute(format("kinit -kt {hdfs_user_keytab} {hdfs_user}"),
- user = params.hdfs_user)
- ExecuteHadoop(mkdir_cmd,
- try_sleep=try_sleep,
- tries=tries,
- not_if=format(
- "! {dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
- "{dir_exists} && ! {namenode_safe_mode_off}"),
- only_if=format(
- "{dir_absent_in_stub} && {dfs_check_nn_status_cmd} && "
- "! {dir_exists}"),
- conf_dir=params.hadoop_conf_dir,
- user=params.hdfs_user
- )
- Execute(record_dir_in_stub,
- user=params.hdfs_user,
- only_if=format("{dir_absent_in_stub}")
- )
-
- recursive = "-R" if recursive_chown else ""
- perm_cmds = []
-
- if owner:
- chown = owner
- if group:
- chown = format("{owner}:{group}")
- perm_cmds.append(format("fs -chown {recursive} {chown} {name}"))
- if mode:
- perm_cmds.append(format("fs -chmod {recursive} {mode} {name}"))
- for cmd in perm_cmds:
- ExecuteHadoop(cmd,
- user=params.hdfs_user,
- only_if=format("! {dir_absent_in_stub} && {dfs_check_nn_status_cmd} && {namenode_safe_mode_off} && {dir_exists}"),
- try_sleep=try_sleep,
- tries=tries,
- conf_dir=params.hadoop_conf_dir
- )
-
-
-
+ )
\ No newline at end of file
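
The hdfs_directory() helper deleted above encoded an idempotence trick that is easy to miss in the minus lines: every directory it created was appended to a stub file, and a grep over that file guarded later runs so directories are not re-created (or re-chowned) on every restart. A standalone sketch of that stub-file guard (STUB is a hypothetical path for the example):

import os

STUB = "/tmp/namenode_dirs_created"        # hypothetical stub path

def dir_recorded(name):
    # mirrors the removed guard: grep -q '^{name}$' {stub_dir}/{stub_filename}
    if not os.path.exists(STUB):
        return False
    with open(STUB) as f:
        return any(line.rstrip("\n") == name for line in f)

def record_dir(name):
    # mirrors the removed: echo '{name}' >> {stub_dir}/{stub_filename}
    with open(STUB, "a") as f:
        f.write(name + "\n")

if not dir_recorded("/apps/hbase"):
    # ... the real helper ran 'hadoop fs -mkdir' and chown/chmod here ...
    record_dir("/apps/hbase")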
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
index c6a560e..37c5a85 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/ZOOKEEPER/package/scripts/params.py
@@ -53,7 +53,7 @@ zoo_cfg_properties_map_length = len(zoo_cfg_properties_map)
zk_primary_name = "zookeeper"
zk_principal_name = "zookeeper/_HOST@EXAMPLE.COM"
-zk_principal = zk_principal_name.replace('_HOST',hostname)
+zk_principal = zk_principal_name.replace('_HOST',hostname.lower())
java64_home = config['hostLevelParams']['java_home']
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
index 6894a78..b096a0d 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_datanode.py
@@ -47,8 +47,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -69,8 +67,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
not_if = None,
user = 'hdfs',
@@ -105,8 +101,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/dn.service.keytab dn/c6402.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
user = 'root',
@@ -126,8 +120,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/dn.service.keytab dn/c6402.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
not_if = None,
user = 'root',
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
index 0bed3d6..6efa948 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_namenode.py
@@ -57,8 +57,6 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -103,8 +101,6 @@ class TestNamenode(RMFTestCase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
not_if = None,
user = 'hdfs',
@@ -149,8 +145,6 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6402.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -198,8 +192,6 @@ class TestNamenode(RMFTestCase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6402.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
not_if = None,
user = 'hdfs',
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
index 9436264..058c19a 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HDFS/test_snamenode.py
@@ -47,8 +47,6 @@ class TestSNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -69,8 +67,6 @@ class TestSNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
not_if = None,
user = 'hdfs',
@@ -104,8 +100,6 @@ class TestSNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6402.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -125,8 +119,6 @@ class TestSNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6402.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/bin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
not_if = None,
user = 'hdfs',
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 894631f..39374e8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -47,8 +47,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -69,8 +67,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
not_if = None,
user = 'hdfs',
@@ -105,8 +101,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/dn.service.keytab dn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1',
user = 'root',
@@ -127,8 +121,6 @@ class TestDatanode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/dn.service.keytab dn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
not_if = None,
user = 'root',
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index f453a6a..b07dc2f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -47,8 +47,6 @@ class TestJournalnode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start journalnode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -69,8 +67,6 @@ class TestJournalnode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop journalnode',
not_if = None,
user = 'hdfs',
@@ -105,8 +101,6 @@ class TestJournalnode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/jn.service.keytab jn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start journalnode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-journalnode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -127,8 +121,6 @@ class TestJournalnode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/jn.service.keytab jn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop journalnode',
not_if = None,
user = 'hdfs',
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index a91d3f9..8e94be3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -62,8 +62,6 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -110,8 +108,6 @@ class TestNamenode(RMFTestCase):
command = "stop",
config_file="default.json"
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
not_if = None,
user = 'hdfs',
@@ -161,8 +157,6 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -212,8 +206,6 @@ class TestNamenode(RMFTestCase):
command = "stop",
config_file="secured.json"
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop namenode',
not_if = None,
user = 'hdfs',
@@ -244,8 +236,6 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -306,8 +296,6 @@ class TestNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
user = 'hdfs',
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index 7693d3f..45b84bd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -47,8 +47,6 @@ class TestSNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -69,8 +67,6 @@ class TestSNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
not_if = None,
user = 'hdfs',
@@ -105,8 +101,6 @@ class TestSNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start secondarynamenode',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-secondarynamenode.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -127,8 +121,6 @@ class TestSNamenode(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'kinit -kt /etc/security/keytabs/nn.service.keytab nn/c6401.ambari.apache.org@EXAMPLE.COM',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop secondarynamenode',
not_if = None,
user = 'hdfs',
http://git-wip-us.apache.org/repos/asf/ambari/blob/f18a824c/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index b258c5d..29ca054 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -37,8 +37,6 @@ class TestZkfc(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -60,8 +58,6 @@ class TestZkfc(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc',
not_if = None,
user = 'hdfs',
@@ -86,8 +82,6 @@ class TestZkfc(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start zkfc',
not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid >/dev/null 2>&1 && ps `cat /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid` >/dev/null 2>&1',
user = 'hdfs',
@@ -108,8 +102,6 @@ class TestZkfc(RMFTestCase):
owner = 'hdfs',
recursive = True,
)
- self.assertResourceCalled('Execute', 'true',
- )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop zkfc',
not_if = None,
user = 'hdfs',
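
All of the test updates are the same shape: the leading assertResourceCalled for Execute('true') (non-secure path) or Execute('kinit ...') (secure path) is dropped, because service() no longer emits that resource. Since RMFTestCase appears to check executed resources in order, a stale leading assertion would also shift every later one onto the wrong resource. A plain-unittest stand-in for that ordered-assertion style:

import unittest

class OrderedResourceTest(unittest.TestCase):
    def setUp(self):
        # what the patched component "executes": no leading Execute('true')
        # or Execute('kinit ...') entry any more
        self.calls = [("Directory", "/var/run/hadoop/hdfs"),
                      ("Execute", "hadoop-daemon.sh ... start datanode")]
        self._i = 0

    def assertResourceCalled(self, rtype, rname):
        self.assertEqual((rtype, rname), self.calls[self._i])
        self._i += 1

    def test_start(self):
        self.assertResourceCalled("Directory", "/var/run/hadoop/hdfs")
        self.assertResourceCalled("Execute",
                                  "hadoop-daemon.sh ... start datanode")

if __name__ == "__main__":
    unittest.main()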