You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by rl...@apache.org on 2017/11/16 19:45:24 UTC
[08/50] [abbrv] ambari git commit: AMBARI-22370 - Remove HADOOP_HOME
From Environment For Daemons (jonathanhurley)
AMBARI-22370 - Remove HADOOP_HOME From Environment For Daemons (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/780e91e6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/780e91e6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/780e91e6
Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 780e91e6124f39fcd67e58369ed2a42da2a6f247
Parents: 444718a
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Nov 6 16:05:56 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Nov 9 08:23:15 2017 -0500
----------------------------------------------------------------------
.../libraries/functions/stack_select.py | 3 ++-
.../ATLAS/0.1.0.2.3/package/scripts/params.py | 2 +-
.../ATLAS/0.7.0.3.0/package/scripts/params.py | 2 +-
.../FALCON/0.5.0.2.1/package/scripts/falcon.py | 11 +----------
.../package/scripts/hive_service_interactive.py | 5 ++---
.../package/scripts/webhcat_service.py | 19 ++++---------------
.../package/scripts/hive_service_interactive.py | 5 ++---
.../2.1.0.3.0/package/scripts/params_linux.py | 7 +++----
.../2.1.0.3.0/package/scripts/webhcat_service.py | 19 ++++---------------
.../1.0.0.2.3/package/scripts/service_check.py | 3 +--
.../1.4.4.2.0/package/scripts/params_linux.py | 4 ++--
.../1.4.4.3.0/package/scripts/params_linux.py | 4 ++--
.../0.4.0.2.1/package/scripts/params_linux.py | 2 +-
.../0.9.0.3.0/package/scripts/params_linux.py | 2 +-
.../2.1.0.2.0/package/scripts/params_linux.py | 2 +-
.../3.0.0.3.0/package/scripts/params_linux.py | 2 +-
.../stacks/2.0.6/HIVE/test_webhcat_server.py | 4 ----
.../stacks/2.1/FALCON/test_falcon_server.py | 10 ++--------
.../python/stacks/2.1/TEZ/test_tez_client.py | 2 +-
.../2.3/MAHOUT/test_mahout_service_check.py | 4 +---
.../stacks/2.5/HIVE/test_hive_server_int.py | 15 +++++----------
21 files changed, 38 insertions(+), 89 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index 9b7d0eb..b741a33 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -71,7 +71,8 @@ HADOOP_DIR_DEFAULTS = {
"libexec": "/usr/lib/hadoop/libexec",
"sbin": "/usr/lib/hadoop/sbin",
"bin": "/usr/bin",
- "lib": "/usr/lib/hadoop/lib"
+ "lib": "/usr/lib/hadoop/lib",
+ "conf": "/etc/hadoop/conf"
}
PACKAGE_SCOPE_INSTALL = "INSTALL"
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index 968ceed..31a866e 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -118,7 +118,7 @@ metadata_stop_script = format("{metadata_bin}/atlas_stop.py")
log_dir = config['configurations']['atlas-env']['metadata_log_dir']
# service locations
-hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else '/etc/hadoop/conf'
+hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else format('{stack_root}/current/hadoop-client/conf')
# some commands may need to supply the JAAS location when running as atlas
atlas_jaas_file = format("{conf_dir}/atlas_jaas.conf")
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
index b01884c..7c1249a 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
@@ -116,7 +116,7 @@ metadata_stop_script = format("{metadata_bin}/atlas_stop.py")
log_dir = config['configurations']['atlas-env']['metadata_log_dir']
# service locations
-hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else '/etc/hadoop/conf'
+hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else format('{stack_root}/current/hadoop-client/conf')
# some commands may need to supply the JAAS location when running as atlas
atlas_jaas_file = format("{conf_dir}/atlas_jaas.conf")
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index 933515b..7d8fa13 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -209,12 +209,6 @@ def falcon(type, action = None, upgrade_type=None):
owner = params.falcon_user,
create_parents = True)
- # although Falcon's falcon-config.sh will use 'which hadoop' to figure
- # this out, in an upgraded cluster, it's possible that 'which hadoop'
- # still points to older binaries; it's safer to just pass in the
- # hadoop home directory to use
- environment_dictionary = { "HADOOP_HOME" : params.hadoop_home_dir }
-
pid = get_user_call_output.get_user_call_output(format("cat {server_pid_file}"), user=params.falcon_user, is_checked_call=False)[1]
process_exists = format("ls {server_pid_file} && ps -p {pid}")
@@ -223,7 +217,6 @@ def falcon(type, action = None, upgrade_type=None):
Execute(format('{falcon_home}/bin/falcon-config.sh server falcon'),
user = params.falcon_user,
path = params.hadoop_bin_dir,
- environment=environment_dictionary,
not_if = process_exists,
)
except:
@@ -253,7 +246,6 @@ in the Falcon documentation.
Execute(format('{falcon_home}/bin/falcon-start -port {falcon_port}'),
user = params.falcon_user,
path = params.hadoop_bin_dir,
- environment=environment_dictionary,
not_if = process_exists,
)
except:
@@ -264,8 +256,7 @@ in the Falcon documentation.
try:
Execute(format('{falcon_home}/bin/falcon-stop'),
user = params.falcon_user,
- path = params.hadoop_bin_dir,
- environment=environment_dictionary)
+ path = params.hadoop_bin_dir)
except:
show_logs(params.falcon_log_dir, params.falcon_user)
raise
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py
index 703d104..71c22d7 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py
@@ -52,12 +52,11 @@ def hive_service_interactive(name, action='start', upgrade_type=None):
if action == 'start':
check_fs_root(params.hive_server_interactive_conf_dir, params.execute_path_hive_interactive)
daemon_cmd = cmd
- hadoop_home = params.hadoop_home
- hive_interactive_bin = "hive2"
+ hive_interactive_bin = format("{stack_root}/current/hive-server2-hive2/bin/hive2")
Execute(daemon_cmd,
user = params.hive_user,
- environment = { 'HADOOP_HOME': hadoop_home, 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_interactive_bin },
+ environment = { 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_interactive_bin },
path = params.execute_path,
not_if = process_id_exists_command)
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
index cb4aafd..bddb5b2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
@@ -40,30 +40,22 @@ def webhcat_service(action='start', rolling_restart=False):
def webhcat_service(action='start', upgrade_type=None):
import params
- environ = {
- 'HADOOP_HOME': params.hadoop_home
- }
-
cmd = format('{webhcat_bin_dir}/webhcat_server.sh')
if action == 'start':
- if upgrade_type is not None and params.version and params.stack_root:
- environ['HADOOP_HOME'] = format("{stack_root}/{version}/hadoop")
-
daemon_cmd = format('cd {hcat_pid_dir} ; {cmd} start')
no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1')
try:
Execute(daemon_cmd,
user=params.webhcat_user,
- not_if=no_op_test,
- environment = environ)
+ not_if=no_op_test)
except:
show_logs(params.hcat_log_dir, params.webhcat_user)
raise
elif action == 'stop':
try:
# try stopping WebHCat using its own script
- graceful_stop(cmd, environ)
+ graceful_stop(cmd)
except Fail:
show_logs(params.hcat_log_dir, params.webhcat_user)
Logger.info(traceback.format_exc())
@@ -95,17 +87,14 @@ def webhcat_service(action='start', upgrade_type=None):
File(params.webhcat_pid_file, action="delete")
-def graceful_stop(cmd, environ):
+def graceful_stop(cmd):
"""
Attempts to stop WebHCat using its own shell script. On some versions this may not correctly
stop the daemon.
:param cmd: the command to run to stop the daemon
- :param environ: the environment variables to execute the command with
:return:
"""
import params
daemon_cmd = format('{cmd} stop')
- Execute(daemon_cmd,
- user = params.webhcat_user,
- environment = environ)
+ Execute(daemon_cmd, user = params.webhcat_user)
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py
index 703d104..71c22d7 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py
@@ -52,12 +52,11 @@ def hive_service_interactive(name, action='start', upgrade_type=None):
if action == 'start':
check_fs_root(params.hive_server_interactive_conf_dir, params.execute_path_hive_interactive)
daemon_cmd = cmd
- hadoop_home = params.hadoop_home
- hive_interactive_bin = "hive2"
+ hive_interactive_bin = format("{stack_root}/current/hive-server2-hive2/bin/hive2")
Execute(daemon_cmd,
user = params.hive_user,
- environment = { 'HADOOP_HOME': hadoop_home, 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_interactive_bin },
+ environment = { 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_interactive_bin },
path = params.execute_path,
not_if = process_id_exists_command)
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
index 1bd6a1a..088a540 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
@@ -36,19 +36,17 @@ from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.stack_features import get_stack_feature_version
from resource_management.libraries.functions import upgrade_summary
from resource_management.libraries.functions.get_port_from_url import get_port_from_url
from resource_management.libraries.functions.expect import expect
from resource_management.libraries import functions
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
-from ambari_commons.ambari_metrics_helper import select_metric_collector_hosts_from_hostnames
from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
from resource_management.libraries.functions.get_architecture import get_architecture
from resource_management.core.utils import PasswordString
-from resource_management.core.shell import checked_call
from resource_management.core.exceptions import Fail
from ambari_commons.credential_store_helper import get_password_from_credential_store
@@ -107,7 +105,8 @@ stack_supports_hive_interactive_ga = check_stack_feature(StackFeature.HIVE_INTER
component_directory = status_params.component_directory
component_directory_interactive = status_params.component_directory_interactive
-hadoop_home = format('{stack_root}/current/hadoop-client')
+hadoop_home = stack_select.get_hadoop_dir("home")
+
hive_bin = format('{stack_root}/current/{component_directory}/bin')
hive_cmd = os.path.join(hive_bin, "hive")
hive_schematool_ver_bin = format('{stack_root}/{version}/hive/bin')
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py
index cb4aafd..bddb5b2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py
@@ -40,30 +40,22 @@ def webhcat_service(action='start', rolling_restart=False):
def webhcat_service(action='start', upgrade_type=None):
import params
- environ = {
- 'HADOOP_HOME': params.hadoop_home
- }
-
cmd = format('{webhcat_bin_dir}/webhcat_server.sh')
if action == 'start':
- if upgrade_type is not None and params.version and params.stack_root:
- environ['HADOOP_HOME'] = format("{stack_root}/{version}/hadoop")
-
daemon_cmd = format('cd {hcat_pid_dir} ; {cmd} start')
no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1')
try:
Execute(daemon_cmd,
user=params.webhcat_user,
- not_if=no_op_test,
- environment = environ)
+ not_if=no_op_test)
except:
show_logs(params.hcat_log_dir, params.webhcat_user)
raise
elif action == 'stop':
try:
# try stopping WebHCat using its own script
- graceful_stop(cmd, environ)
+ graceful_stop(cmd)
except Fail:
show_logs(params.hcat_log_dir, params.webhcat_user)
Logger.info(traceback.format_exc())
@@ -95,17 +87,14 @@ def webhcat_service(action='start', upgrade_type=None):
File(params.webhcat_pid_file, action="delete")
-def graceful_stop(cmd, environ):
+def graceful_stop(cmd):
"""
Attempts to stop WebHCat using its own shell script. On some versions this may not correctly
stop the daemon.
:param cmd: the command to run to stop the daemon
- :param environ: the environment variables to execute the command with
:return:
"""
import params
daemon_cmd = format('{cmd} stop')
- Execute(daemon_cmd,
- user = params.webhcat_user,
- environment = environ)
+ Execute(daemon_cmd, user = params.webhcat_user)
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
index c1151fc..b15d158 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
@@ -71,8 +71,7 @@ class MahoutServiceCheck(Script):
Execute( mahout_command,
tries = 3,
try_sleep = 5,
- environment={'HADOOP_HOME': params.hadoop_home,'HADOOP_CONF_DIR': params.hadoop_conf_dir,
- 'MAHOUT_HOME': params.mahout_home,'JAVA_HOME': params.java64_home},
+ environment={'MAHOUT_HOME': params.mahout_home,'JAVA_HOME': params.java64_home},
path = format('/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
user = params.smokeuser
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
index 400c87c..eaf1ee4 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py
@@ -23,11 +23,11 @@ from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.get_kinit_path import get_kinit_path
from resource_management.libraries.script import Script
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.expect import expect
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
from resource_management.core.exceptions import Fail
@@ -71,7 +71,7 @@ zoo_conf_dir = "/etc/zookeeper"
if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
sqoop_conf_dir = format("{stack_root}/current/sqoop-client/conf")
sqoop_lib = format("{stack_root}/current/sqoop-client/lib")
- hadoop_home = format("{stack_root}/current/hadoop-client")
+ hadoop_home = stack_select.get_hadoop_dir("home")
hbase_home = format("{stack_root}/current/hbase-client")
hive_home = format("{stack_root}/current/hive-client")
sqoop_bin_dir = format("{stack_root}/current/sqoop-client/bin/")
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
index 400c87c..eaf1ee4 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.3.0/package/scripts/params_linux.py
@@ -23,11 +23,11 @@ from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.get_kinit_path import get_kinit_path
from resource_management.libraries.script import Script
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.expect import expect
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
from resource_management.core.exceptions import Fail
@@ -71,7 +71,7 @@ zoo_conf_dir = "/etc/zookeeper"
if stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_formatted):
sqoop_conf_dir = format("{stack_root}/current/sqoop-client/conf")
sqoop_lib = format("{stack_root}/current/sqoop-client/lib")
- hadoop_home = format("{stack_root}/current/hadoop-client")
+ hadoop_home = stack_select.get_hadoop_dir("home")
hbase_home = format("{stack_root}/current/hbase-client")
hive_home = format("{stack_root}/current/hive-client")
sqoop_bin_dir = format("{stack_root}/current/sqoop-client/bin/")
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 4d63685..cef709b 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -50,7 +50,7 @@ version = default("/commandParams/version", None)
hadoop_lib_home = stack_select.get_hadoop_dir("lib")
# default hadoop parameters
-hadoop_home = '/usr'
+hadoop_home = stack_select.get_hadoop_dir("home")
hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
tez_etc_dir = "/etc/tez"
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/params_linux.py
index 5a028bd..2b3fa38 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/params_linux.py
@@ -48,7 +48,7 @@ stack_version_formatted = format_stack_version(stack_version_unformatted)
version = default("/commandParams/version", None)
# default hadoop parameters
-hadoop_home = '/usr'
+hadoop_home = stack_select.get_hadoop_dir("home")
hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
tez_etc_dir = "/etc/tez"
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 620408b..3e4504d 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -65,7 +65,7 @@ stack_name = status_params.stack_name
stack_root = Script.get_stack_root()
tarball_map = default("/configurations/cluster-env/tarball_map", None)
-config_path = os.path.join(stack_root, "current/hadoop-client/conf")
+config_path = stack_select.get_hadoop_dir("conf")
config_dir = os.path.realpath(config_path)
# get the correct version to use for checking stack features
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
index e4dbe2c..617dc3b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
@@ -65,7 +65,7 @@ stack_name = status_params.stack_name
stack_root = Script.get_stack_root()
tarball_map = default("/configurations/cluster-env/tarball_map", None)
-config_path = os.path.join(stack_root, "current/hadoop-client/conf")
+config_path = stack_select.get_hadoop_dir("conf")
config_dir = os.path.realpath(config_path)
# get the correct version to use for checking stack features
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index f9480ee..b4652ac 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -65,7 +65,6 @@ class TestWebHCatServer(RMFTestCase):
self.assert_configure_default()
self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; /usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh start',
- environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'},
not_if = "ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps -p `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1",
user = 'hcat',
)
@@ -82,7 +81,6 @@ class TestWebHCatServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh stop',
user = 'hcat',
- environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client' }
)
self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/webhcat/webhcat.pid`',
@@ -148,7 +146,6 @@ class TestWebHCatServer(RMFTestCase):
self.assert_configure_secured()
self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; /usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh start',
- environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop'},
not_if = "ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps -p `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1",
user = 'hcat',
)
@@ -165,7 +162,6 @@ class TestWebHCatServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh stop',
user = 'hcat',
- environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop' }
)
self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/webhcat/webhcat.pid`',
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 7f2ed46..8c48347 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -49,7 +49,6 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-config.sh server falcon',
path = ['/usr/bin'],
user = 'falcon',
- environment = {'HADOOP_HOME': '/usr/lib/hadoop'},
not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
)
@@ -61,7 +60,6 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-start -port 15000',
path = ['/usr/bin'],
user = 'falcon',
- environment = {'HADOOP_HOME': '/usr/lib/hadoop'},
not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
)
@@ -78,8 +76,7 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-stop',
path = ['/usr/bin'],
- user = 'falcon',
- environment = {'HADOOP_HOME': '/usr/lib/hadoop'})
+ user = 'falcon')
self.assertResourceCalled('File', '/var/run/falcon/falcon.pid',
action = ['delete'])
@@ -236,8 +233,7 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute',
'/usr/hdp/current/falcon-server/bin/falcon-stop',
- path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'], user='falcon',
- environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'})
+ path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'], user='falcon')
self.assertResourceCalled('File', '/var/run/falcon/falcon.pid',
action = ['delete'])
@@ -406,14 +402,12 @@ class TestFalconServer(RMFTestCase):
)
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-config.sh server falcon',
- environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'},
path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'],
user = 'falcon',
not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
)
self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-start -port 15000',
- environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'},
path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'],
user = 'falcon',
not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
index fad99f6..59b2166 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
@@ -130,6 +130,6 @@ class TestTezClient(RMFTestCase):
config_dict = json_content,
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES,
- call_mocks = [(0, None, ''), (0, None)],
+ call_mocks = [(0, None, ''),(0, None, ''), (0, None)],
mocks_dict = mocks_dict)
# for now, it's enough to know the method didn't fail
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
index b80476c..8695653 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -109,9 +109,7 @@ class TestMahoutClient(RMFTestCase):
self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'
'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ '
'--charset utf-8',
- environment = {'HADOOP_CONF_DIR': '/usr/hdp/2.2.1.0-2067/hadoop/conf',
- 'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
- 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45',
+ environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45',
'MAHOUT_HOME': '/usr/hdp/current/mahout-client'},
path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
tries = 3,
http://git-wip-us.apache.org/repos/asf/ambari/blob/780e91e6/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
index 4951c7e..cf79ec7 100644
--- a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
@@ -115,8 +115,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -178,8 +177,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -227,8 +225,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -305,8 +302,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',
@@ -364,8 +360,7 @@ class TestHiveServerInteractive(RMFTestCase):
)
self.assertResourceCalled('Execute',
'/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /usr/hdp/current/hive-server2-hive2/conf/conf.server /var/log/hive',
- environment={'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
- 'HIVE_BIN': 'hive2',
+ environment={'HIVE_BIN': '/usr/hdp/current/hive-server2-hive2/bin/hive2',
'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
user='hive',