You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by jl...@apache.org on 2016/03/09 00:33:49 UTC
[7/9] ambari git commit: AMBARI-15329: Code Cleanup: Remove hdp
hardcodings in functions, variables etc. (jluniya)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
index d60e961..664cafa 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
@@ -24,8 +24,8 @@ from resource_management.core.exceptions import Fail
from resource_management.core.resources.system import Execute
from resource_management.core import shell
from resource_management.libraries.functions import format
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.version import compare_versions
@@ -74,24 +74,24 @@ def post_upgrade_deregister():
Execute(command, user=params.hive_user, path=hive_execute_path, tries=1 )
-def _get_hive_execute_path(hdp_stack_version):
+def _get_hive_execute_path(stack_version_formatted):
"""
Returns the exact execute path to use for the given stack-version.
This method does not return the "current" path
- :param hdp_stack_version: Exact stack-version to use in the new path
+ :param stack_version_formatted: Exact stack-version to use in the new path
:return: Hive execute path for the exact hdp stack-version
"""
import params
hive_execute_path = params.execute_path
- formatted_stack_version = format_hdp_stack_version(hdp_stack_version)
+ formatted_stack_version = format_stack_version(stack_version_formatted)
if formatted_stack_version and compare_versions(formatted_stack_version, "2.2") >= 0:
# hive_bin
- new_hive_bin = format('/usr/hdp/{hdp_stack_version}/hive/bin')
+ new_hive_bin = format('/usr/hdp/{stack_version_formatted}/hive/bin')
if (os.pathsep + params.hive_bin) in hive_execute_path:
hive_execute_path = hive_execute_path.replace(os.pathsep + params.hive_bin, os.pathsep + new_hive_bin)
# hadoop_bin_dir
- new_hadoop_bin = hdp_select.get_hadoop_dir_for_stack_version("bin", hdp_stack_version)
+ new_hadoop_bin = stack_select.get_hadoop_dir_for_stack_version("bin", stack_version_formatted)
old_hadoop_bin = params.hadoop_bin_dir
if new_hadoop_bin and len(new_hadoop_bin) > 0 and (os.pathsep + old_hadoop_bin) in hive_execute_path:
hive_execute_path = hive_execute_path.replace(os.pathsep + old_hadoop_bin, os.pathsep + new_hadoop_bin)
@@ -117,7 +117,7 @@ def _get_current_hiveserver_version():
source_version = params.current_version
hive_execute_path = _get_hive_execute_path(source_version)
version_hive_bin = params.hive_bin
- formatted_source_version = format_hdp_stack_version(source_version)
+ formatted_source_version = format_stack_version(source_version)
if formatted_source_version and compare_versions(formatted_source_version, "2.2") >= 0:
version_hive_bin = format('/usr/hdp/{source_version}/hive/bin')
command = format('{version_hive_bin}/hive --version')
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 2531598..63ad482 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -32,7 +32,7 @@ from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.is_empty import is_empty
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.copy_tarball import STACK_VERSION_PATTERN
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script
@@ -53,11 +53,11 @@ hostname = config["hostname"]
# This is expected to be of the form #.#.#.#
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp21 = Script.is_hdp_stack_less_than("2.2")
+stack_version_formatted_major = format_stack_version(stack_version_unformatted)
+stack_is_hdp21 = Script.is_stack_less_than("2.2")
# this is not available on INSTALL action because hdp-select is not available
-hdp_stack_version = functions.get_hdp_version('hive-server2')
+stack_version_formatted = functions.get_stack_version('hive-server2')
# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade.
# It cannot be used during the initial Cluser Install because the version is not yet known.
@@ -109,7 +109,7 @@ webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
# Starting from HDP2.3 drop should be executed with purge suffix
purge_tables = "false"
-if Script.is_hdp_stack_greater_or_equal("2.3"):
+if Script.is_stack_greater_or_equal("2.3"):
purge_tables = 'true'
# this is NOT a typo. HDP-2.3 configs for hcatalog/webhcat point to a
@@ -117,7 +117,7 @@ if Script.is_hdp_stack_greater_or_equal("2.3"):
hcat_conf_dir = '/usr/hdp/current/hive-webhcat/etc/hcatalog'
config_dir = '/usr/hdp/current/hive-webhcat/etc/webhcat'
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
hive_specific_configs_supported = True
component_directory = status_params.component_directory
@@ -287,7 +287,7 @@ target = format("{hive_lib}/{jdbc_jar_name}")
jars_in_hive_lib = format("{hive_lib}/*.jar")
-if Script.is_hdp_stack_less_than("2.2"):
+if Script.is_stack_less_than("2.2"):
source_jdbc_file = target
else:
# normally, the JDBC driver would be referenced by /usr/hdp/current/.../foo.jar
@@ -304,7 +304,7 @@ start_metastore_path = format("{tmp_dir}/start_metastore_script")
hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
if 'role' in config and config['role'] in ["HIVE_SERVER", "HIVE_METASTORE"]:
- if Script.is_hdp_stack_less_than("2.2"):
+ if Script.is_stack_less_than("2.2"):
hive_heapsize = config['configurations']['hive-site']['hive.heapsize']
else:
hive_heapsize = config['configurations']['hive-env']['hive.heapsize']
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
index 359604f..7c21b5f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py
@@ -26,7 +26,7 @@ config = Script.get_config()
# This is expected to be of the form #.#.#.#
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
hdp_root = None
hive_conf_dir = None
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
index 8c035db..d0924b9 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -21,7 +21,7 @@ limitations under the License.
from ambari_commons import OSCheck
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
@@ -72,7 +72,7 @@ else:
# default configuration directories
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
- hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+ hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
webhcat_conf_dir = '/etc/hive-webhcat/conf'
hive_etc_dir_prefix = "/etc/hive"
hive_conf_dir = "/etc/hive/conf"
@@ -82,13 +82,13 @@ else:
hive_server_conf_dir = "/etc/hive/conf.server"
# HDP 2.2+
- if Script.is_hdp_stack_greater_or_equal("2.2"):
+ if Script.is_stack_greater_or_equal("2.2"):
webhcat_conf_dir = '/usr/hdp/current/hive-webhcat/conf'
hive_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
hive_client_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
# HDP 2.3+
- if Script.is_hdp_stack_greater_or_equal("2.3"):
+ if Script.is_stack_greater_or_equal("2.3"):
# ranger is only compatible with this location on HDP 2.3+, not HDP 2.2
hive_server_conf_dir = format("/usr/hdp/current/{component_directory}/conf/conf.server")
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index 401debc..b95f6f3 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -90,7 +90,7 @@ def webhcat():
)
# if we're in an upgrade of a secure cluster, make sure hive-site and yarn-site are created
- if Script.is_hdp_stack_greater_or_equal("2.3") and params.version:
+ if Script.is_stack_greater_or_equal("2.3") and params.version:
XmlConfig("hive-site.xml",
conf_dir = format("/usr/hdp/{version}/hive/conf"),
configurations = params.config['configurations']['hive-site'],
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index c12b168..b0a876d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -20,7 +20,7 @@ Ambari Agent
"""
from resource_management import *
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.security_commons import build_expectations, \
cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
FILE_TYPE_XML
@@ -75,11 +75,11 @@ class WebHCatServerDefault(WebHCatServer):
import params
env.set_params(params)
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
# webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
conf_select.select(params.stack_name, "hive-hcatalog", params.version)
conf_select.select(params.stack_name, "hadoop", params.version)
- hdp_select.select("hive-webhcat", params.version)
+ stack_select.select("hive-webhcat", params.version)
def security_status(self, env):
import status_params
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py
index 2556741..43b318c 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka.py
@@ -20,7 +20,7 @@ limitations under the License.
import collections
import os
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
from resource_management.libraries.resources.properties_file import PropertiesFile
from resource_management.libraries.resources.template_config import TemplateConfig
from resource_management.core.resources.system import Directory, Execute, File, Link
@@ -39,7 +39,7 @@ def kafka(upgrade_type=None):
# This still has an issue of hostnames being alphabetically out-of-order for broker.id in HDP-2.2.
# Starting in HDP 2.3, Kafka handles the generation of broker.id so Ambari doesn't have to.
- effective_version = params.hdp_stack_version if upgrade_type is None else format_hdp_stack_version(params.version)
+ effective_version = params.stack_version_formatted if upgrade_type is None else format_stack_version(params.version)
Logger.info(format("Effective stack version: {effective_version}"))
if effective_version is not None and effective_version != "" and compare_versions(effective_version, '2.2.0.0') >= 0 and compare_versions(effective_version, '2.3.0.0') < 0:
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
index 3f650bd..314d702 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/kafka_broker.py
@@ -20,9 +20,9 @@ from resource_management import Script
from resource_management.core.logger import Logger
from resource_management.core.resources.system import Execute, File, Directory
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import Direction
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.check_process_status import check_process_status
from kafka import ensure_base_directories
@@ -48,22 +48,22 @@ class KafkaBroker(Script):
import params
env.set_params(params)
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
- hdp_select.select("kafka-broker", params.version)
+ if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
+ stack_select.select("kafka-broker", params.version)
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.3.0.0') >= 0:
conf_select.select(params.stack_name, "kafka", params.version)
# This is extremely important since it should only be called if crossing the HDP 2.3.4.0 boundary.
if params.current_version and params.version and params.upgrade_direction:
src_version = dst_version = None
if params.upgrade_direction == Direction.UPGRADE:
- src_version = format_hdp_stack_version(params.current_version)
- dst_version = format_hdp_stack_version(params.version)
+ src_version = format_stack_version(params.current_version)
+ dst_version = format_stack_version(params.version)
else:
# These represent the original values during the UPGRADE direction
- src_version = format_hdp_stack_version(params.version)
- dst_version = format_hdp_stack_version(params.downgrade_from_version)
+ src_version = format_stack_version(params.version)
+ dst_version = format_stack_version(params.downgrade_from_version)
if compare_versions(src_version, '2.3.4.0') < 0 and compare_versions(dst_version, '2.3.4.0') >= 0:
# Calling the acl migration script requires the configs to be present.
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
index d629533..ba6857e 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
@@ -19,15 +19,15 @@ limitations under the License.
"""
from resource_management.libraries.functions import format
from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
from resource_management.libraries.functions.default import default
from utils import get_bare_principal
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.libraries.functions.is_empty import is_empty
import status_params
from resource_management.core.logger import Logger
from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import get_kinit_path
@@ -47,7 +47,7 @@ current_version = default("/hostLevelParams/current_version", None)
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
upgrade_direction = default("/commandParams/upgrade_direction", None)
# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
@@ -69,7 +69,7 @@ kafka_user_nofile_limit = config['configurations']['kafka-env']['kafka_user_nofi
kafka_user_nproc_limit = config['configurations']['kafka-env']['kafka_user_nproc_limit']
# parameters for 2.2+
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
kafka_home = '/usr/hdp/current/kafka-broker/'
kafka_bin = kafka_home+'bin/kafka'
conf_dir = "/usr/hdp/current/kafka-broker/config"
@@ -139,7 +139,7 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
kafka_kerberos_enabled = ('security.inter.broker.protocol' in config['configurations']['kafka-broker'] and
config['configurations']['kafka-broker']['security.inter.broker.protocol'] == "PLAINTEXTSASL")
-if security_enabled and hdp_stack_version != "" and 'kafka_principal_name' in config['configurations']['kafka-env'] and compare_versions(hdp_stack_version, '2.3') >= 0:
+if security_enabled and stack_version_formatted != "" and 'kafka_principal_name' in config['configurations']['kafka-env'] and compare_versions(stack_version_formatted, '2.3') >= 0:
_hostname_lowercase = config['hostname'].lower()
_kafka_principal_name = config['configurations']['kafka-env']['kafka_principal_name']
kafka_jaas_principal = _kafka_principal_name.replace('_HOST',_hostname_lowercase)
@@ -248,7 +248,7 @@ if has_ranger_admin and is_supported_kafka_ranger:
ssl_truststore_password = unicode(config['configurations']['ranger-kafka-policymgr-ssl']['xasecure.policymgr.clientssl.truststore.password']) if xml_configurations_supported else None
credential_file = format('/etc/ranger/{repo_name}/cred.jceks') if xml_configurations_supported else None
- hdp_version = get_hdp_version('kafka-broker')
+ hdp_version = get_stack_version('kafka-broker')
setup_ranger_env_sh_source = format('/usr/hdp/{hdp_version}/ranger-kafka-plugin/install/conf.templates/enable/kafka-ranger-env.sh')
setup_ranger_env_sh_target = format("{conf_dir}/kafka-ranger-env.sh")
@@ -264,7 +264,7 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") if has_namenode else None
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox.py
index 5436e47..df10136 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox.py
@@ -66,7 +66,7 @@ def knox():
content=InlineTemplate(params.admin_topology_template)
)
- if Script.is_hdp_stack_greater_or_equal_to(params.version_formatted, "2.3.8.0"):
+ if Script.is_stack_greater_or_equal_to(params.version_formatted, "2.3.8.0"):
File(os.path.join(params.knox_conf_dir, "topologies", "knoxsso.xml"),
group=params.knox_group,
owner=params.knox_user,
@@ -123,7 +123,7 @@ def knox():
content=InlineTemplate(params.admin_topology_template)
)
- if Script.is_hdp_stack_greater_or_equal_to(params.version_formatted, "2.3.8.0"):
+ if Script.is_stack_greater_or_equal_to(params.version_formatted, "2.3.8.0"):
File(os.path.join(params.knox_conf_dir, "topologies", "knoxsso.xml"),
group=params.knox_group,
owner=params.knox_user,
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
index 4285c6e..30b9a41 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
@@ -22,12 +22,12 @@ import tarfile
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions import conf_select, tar_archive
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import Direction
from resource_management.libraries.functions.security_commons import build_expectations, \
cached_kinit_executor, validate_security_config_properties, get_params_from_filesystem, \
@@ -112,7 +112,7 @@ class KnoxGatewayDefault(KnoxGateway):
def pre_upgrade_restart(self, env, upgrade_type=None):
import params
env.set_params(params)
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
absolute_backup_dir = None
if params.upgrade_direction and params.upgrade_direction == Direction.UPGRADE:
@@ -123,7 +123,7 @@ class KnoxGatewayDefault(KnoxGateway):
# conf-select will change the symlink to the conf folder.
conf_select.select(params.stack_name, "knox", params.version)
- hdp_select.select("knox-server", params.version)
+ stack_select.select("knox-server", params.version)
# Extract the tar of the old conf folder into the new conf directory
if absolute_backup_dir is not None and params.upgrade_direction and params.upgrade_direction == Direction.UPGRADE:
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index c42c123..297f77d 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -22,15 +22,15 @@ from resource_management.core.logger import Logger
import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.get_port_from_url import get_port_from_url
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script
from status_params import *
from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import conf_select
# server configurations
@@ -41,11 +41,11 @@ stack_name = default("/hostLevelParams/stack_name", None)
upgrade_direction = default("/commandParams/upgrade_direction", None)
version = default("/commandParams/version", None)
# E.g., 2.3.2.0
-version_formatted = format_hdp_stack_version(version)
+version_formatted = format_stack_version(version)
# E.g., 2.3
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
# This is the version whose state is CURRENT. During an RU, this is the source version.
# DO NOT format it since we need the build number too.
@@ -59,7 +59,7 @@ knox_data_dir = '/var/lib/knox/data'
# Important, it has to be strictly greater than 2.3.0.0!!!
if stack_name and stack_name.upper() == "HDP":
Logger.info(format("HDP version to use is {version_formatted}"))
- if Script.is_hdp_stack_greater(version_formatted, "2.3.0.0"):
+ if Script.is_stack_greater(version_formatted, "2.3.0.0"):
# This is the current version. In the case of a Rolling Upgrade, it will be the newer version.
# In the case of a Downgrade, it will be the version downgrading to.
# This is always going to be a symlink to /var/lib/knox/data_${version}
@@ -82,7 +82,7 @@ ldap_bin = '/usr/lib/knox/bin/ldap.sh'
knox_client_bin = '/usr/lib/knox/bin/knoxcli.sh'
# HDP 2.2+ parameters
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
knox_bin = '/usr/hdp/current/knox-server/bin/gateway.sh'
knox_conf_dir = '/usr/hdp/current/knox-server/conf'
ldap_bin = '/usr/hdp/current/knox-server/bin/ldap.sh'
@@ -96,7 +96,7 @@ knox_group = default("/configurations/knox-env/knox_group", "knox")
mode = 0644
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
dfs_ha_enabled = False
dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None)
@@ -334,7 +334,7 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") if has_namenode else None
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
import functools
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
index cf47b63..b1a5ebc 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/status_params.py
@@ -30,7 +30,7 @@ if OSCheck.is_windows_family():
knox_ldap_win_service_name = "ldap"
else:
knox_conf_dir = '/etc/knox/conf'
- if Script.is_hdp_stack_greater_or_equal("2.2"):
+ if Script.is_stack_greater_or_equal("2.2"):
knox_conf_dir = '/usr/hdp/current/knox-server/conf'
knox_pid_dir = config['configurations']['knox-env']['knox_pid_dir']
knox_pid_file = format("{knox_pid_dir}/gateway.pid")
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/upgrade.py
index 55c5060..8dd0cb2 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/upgrade.py
@@ -27,7 +27,7 @@ from resource_management.core.exceptions import Fail
from resource_management.libraries.functions import tar_archive
from resource_management.libraries.functions import format
from resource_management.libraries.functions import Direction
-from resource_management.libraries.functions.version import compare_versions,format_hdp_stack_version
+from resource_management.libraries.functions.version import compare_versions,format_stack_version
BACKUP_TEMP_DIR = "knox-upgrade-backup"
@@ -82,7 +82,7 @@ def _get_directory_mappings_during_upgrade():
knox_data_dir = '/var/lib/knox/data'
if params.stack_name and params.stack_name.upper() == "HDP" and \
- compare_versions(format_hdp_stack_version(params.upgrade_from_version), "2.3.0.0") > 0:
+ compare_versions(format_stack_version(params.upgrade_from_version), "2.3.0.0") > 0:
# Use the version that is being upgraded from.
knox_data_dir = format('/usr/hdp/{upgrade_from_version}/knox/data')
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
index cc41f3a..38269cb 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
@@ -20,7 +20,7 @@ Ambari Agent
"""
from resource_management.core.logger import Logger
from resource_management.core.exceptions import ClientComponentHasNoStatus
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import conf_select
from resource_management.libraries.script import Script
from mahout import mahout
@@ -38,7 +38,7 @@ class MahoutClient(Script):
env.set_params(params)
conf_select.select(params.stack_name, "mahout", params.version)
- hdp_select.select("mahout-client", params.version )
+ stack_select.select("mahout-client", params.version )
def install(self, env):
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index b1667a8..2c57e96 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -20,9 +20,9 @@ Ambari Agent
"""
from resource_management import *
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import format
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script
@@ -35,7 +35,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)
@@ -48,8 +48,8 @@ mahout_user = config['configurations']['mahout-env']['mahout_user']
yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
#hadoop params
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
-hadoop_home = hdp_select.get_hadoop_dir("home")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_home = stack_select.get_hadoop_dir("home")
# the configuration directory for HDFS/YARN/MapR is the hadoop config
# directory, which is symlinked by hadoop-client only
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index df9ecfe..81a227e 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -146,7 +146,7 @@ def oozie(is_server=False):
owner=params.oozie_user
)
- if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+ if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
File(format("{params.conf_dir}/adminusers.txt"),
mode=0644,
group=params.user_group,
@@ -318,7 +318,7 @@ def oozie_server_specific():
mode = 0644,
)
- if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+ if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
# Create hive-site and tez-site configs for oozie
Directory(params.hive_conf_dir,
create_parents = True,
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
index 2e29464..4fc50d2 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
@@ -21,7 +21,7 @@ limitations under the License.
import sys
from resource_management import *
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from oozie import oozie
from oozie_service import oozie_service
@@ -53,12 +53,12 @@ class OozieClient(Script):
# this function should not execute if the version can't be determined or
# is not at least HDP 2.2.0.0
- if not params.version or compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') < 0:
+ if not params.version or compare_versions(format_stack_version(params.version), '2.2.0.0') < 0:
return
Logger.info("Executing Oozie Client Stack Upgrade pre-restart")
conf_select.select(params.stack_name, "oozie", params.version)
- hdp_select.select("oozie-client", params.version)
+ stack_select.select("oozie-client", params.version)
# We substitute some configs (oozie.authentication.kerberos.principal) before generation (see oozie.py and params.py).
# This function returns changed configs (it's used for config generation before config download)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
index b87e453..030fb2d 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
@@ -22,8 +22,8 @@ from resource_management.core import Logger
from resource_management.libraries.script import Script
from resource_management.libraries.functions import compare_versions
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import format_stack_version
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions import default
from resource_management.libraries.functions.constants import Direction
@@ -65,17 +65,17 @@ class OozieServer(Script):
if upgrade_type is not None and params.upgrade_direction == Direction.UPGRADE and params.version is not None:
Logger.info(format("Configuring Oozie during upgrade type: {upgrade_type}, direction: {params.upgrade_direction}, and version {params.version}"))
- if compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ if compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
# In order for the "/usr/hdp/current/oozie-<client/server>" point to the new version of
# oozie, we need to create the symlinks both for server and client.
# This is required as both need to be pointing to new installed oozie version.
# Sets the symlink : eg: /usr/hdp/current/oozie-client -> /usr/hdp/2.3.x.y-<version>/oozie
- hdp_select.select("oozie-client", params.version)
+ stack_select.select("oozie-client", params.version)
# Sets the symlink : eg: /usr/hdp/current/oozie-server -> /usr/hdp/2.3.x.y-<version>/oozie
- hdp_select.select("oozie-server", params.version)
+ stack_select.select("oozie-server", params.version)
- if compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0:
+ if compare_versions(format_stack_version(params.version), '2.3.0.0') >= 0:
conf_select.select(params.stack_name, "oozie", params.version)
env.set_params(params)
@@ -187,16 +187,16 @@ class OozieServerDefault(OozieServer):
# this function should not execute if the version can't be determined or
# is not at least HDP 2.2.0.0
- if not params.version or compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') < 0:
+ if not params.version or compare_versions(format_stack_version(params.version), '2.2.0.0') < 0:
return
Logger.info("Executing Oozie Server Stack Upgrade pre-restart")
OozieUpgrade.backup_configuration()
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
conf_select.select(params.stack_name, "oozie", params.version)
- hdp_select.select("oozie-server", params.version)
+ stack_select.select("oozie-server", params.version)
OozieUpgrade.restore_configuration()
OozieUpgrade.prepare_libext_directory()
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
index f0ebd20..27e2766 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
@@ -30,8 +30,8 @@ from resource_management.core.resources.system import File
from resource_management.libraries.functions import Direction
from resource_management.libraries.functions import format
from resource_management.libraries.functions import compare_versions
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import format_stack_version
from resource_management.libraries.functions import tar_archive
from resource_management.libraries.script.script import Script
@@ -108,7 +108,7 @@ class OozieUpgrade(Script):
# some versions of HDP don't need the lzo compression libraries
target_version_needs_compression_libraries = compare_versions(
- format_hdp_stack_version(params.version), '2.2.1.0') >= 0
+ format_stack_version(params.version), '2.2.1.0') >= 0
# ensure the directory exists
Directory(params.oozie_libext_dir, mode = 0777)
@@ -162,7 +162,7 @@ class OozieUpgrade(Script):
oozie.download_database_library_if_needed()
# get the upgrade version in the event that it's needed
- upgrade_stack = hdp_select._get_upgrade_stack()
+ upgrade_stack = stack_select._get_upgrade_stack()
if upgrade_stack is None or len(upgrade_stack) < 2 or upgrade_stack[1] is None:
raise Fail("Unable to determine the stack that is being upgraded to or downgraded to.")
@@ -226,7 +226,7 @@ class OozieUpgrade(Script):
command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal_with_host}")
Execute(command, user=params.oozie_user, logoutput=True)
- upgrade_stack = hdp_select._get_upgrade_stack()
+ upgrade_stack = stack_select._get_upgrade_stack()
if upgrade_stack is None or len(upgrade_stack) < 2 or upgrade_stack[1] is None:
raise Fail("Unable to determine the stack that is being upgraded to or downgraded to.")
@@ -278,7 +278,7 @@ class OozieUpgrade(Script):
params.HdfsResource(None, action = "execute")
- upgrade_stack = hdp_select._get_upgrade_stack()
+ upgrade_stack = stack_select._get_upgrade_stack()
if upgrade_stack is None or upgrade_stack[1] is None:
raise Fail("Unable to determine the stack that is being upgraded to or downgraded to.")
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 072b127..0decbc2 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -22,8 +22,8 @@ from ambari_commons.constants import AMBARI_SUDO_BINARY
from ambari_commons.str_utils import cbool, cint
from resource_management.libraries.functions import format
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions import get_port_from_url
@@ -51,17 +51,17 @@ agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_sta
agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
-hadoop_lib_home = hdp_select.get_hadoop_dir("lib")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
#hadoop params
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
# something like 2.3.0.0-1234
stack_version = None
- upgrade_stack = hdp_select._get_upgrade_stack()
+ upgrade_stack = stack_select._get_upgrade_stack()
if upgrade_stack is not None and len(upgrade_stack) == 2 and upgrade_stack[1] is not None:
stack_version = upgrade_stack[1]
@@ -143,7 +143,7 @@ oozie_site = config['configurations']['oozie-site']
# Need this for yarn.nodemanager.recovery.dir in yarn-site
yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
-if security_enabled and Script.is_hdp_stack_less_than("2.2"):
+if security_enabled and Script.is_stack_less_than("2.2"):
#older versions of oozie have problems when using _HOST in principal
oozie_site = dict(config['configurations']['oozie-site'])
oozie_site['oozie.service.HadoopAccessorService.kerberos.principal'] = \
@@ -194,7 +194,7 @@ if https_port is not None:
hdfs_site = config['configurations']['hdfs-site']
fs_root = config['configurations']['core-site']['fs.defaultFS']
-if Script.is_hdp_stack_less_than("2.2"):
+if Script.is_stack_less_than("2.2"):
put_shared_lib_to_hdfs_cmd = format("hadoop --config {hadoop_conf_dir} dfs -put {oozie_shared_lib} {oozie_hdfs_user_dir}")
# for newer
else:
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
index d575bd1..954bb80 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/status_params.py
@@ -48,7 +48,7 @@ else:
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
conf_dir = "/etc/oozie/conf"
- if Script.is_hdp_stack_greater_or_equal("2.2"):
+ if Script.is_stack_greater_or_equal("2.2"):
conf_dir = format("/usr/hdp/current/{component_directory}/conf")
tmp_dir = Script.get_tmp_dir()
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index fc6fd81..ff41105 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -22,8 +22,8 @@ Ambari Agent
from resource_management.libraries.script.script import Script
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
@@ -34,22 +34,22 @@ tmp_dir = Script.get_tmp_dir()
stack_name = default("/hostLevelParams/stack_name", None)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)
# hadoop default parameters
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
pig_conf_dir = "/etc/pig/conf"
hadoop_home = '/usr'
pig_bin_dir = ""
# hadoop parameters for 2.2+
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
pig_conf_dir = "/usr/hdp/current/pig-client/conf"
- hadoop_home = hdp_select.get_hadoop_dir("home")
+ hadoop_home = stack_select.get_hadoop_dir("home")
pig_bin_dir = '/usr/hdp/current/pig-client/bin'
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
index 36c188e..304ca15 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
@@ -23,7 +23,7 @@ import sys
import os
from resource_management import *
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from pig import pig
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -46,10 +46,10 @@ class PigClientLinux(PigClient):
import params
env.set_params(params)
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
conf_select.select(params.stack_name, "pig", params.version)
conf_select.select(params.stack_name, "hadoop", params.version)
- hdp_select.select("hadoop-client", params.version) # includes pig-client
+ stack_select.select("hadoop-client", params.version) # includes pig-client
def install(self, env):
self.install_packages(env)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index fc819b8..155e63c 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -84,7 +84,7 @@ class PigServiceCheckLinux(PigServiceCheck):
bin_dir = params.hadoop_bin_dir
)
- if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+ if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
# cleanup results from previous test
params.HdfsResource(output_dir,
type="directory",
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index 6b6bf28..e5b54cd 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -19,7 +19,7 @@ limitations under the License.
"""
import os
from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.is_empty import is_empty
@@ -43,7 +43,7 @@ version = default("/commandParams/version", None)
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
upgrade_marker_file = format("{tmp_dir}/rangeradmin_ru.inprogress")
@@ -51,8 +51,8 @@ xml_configurations_supported = config['configurations']['ranger-env']['xml_confi
create_db_dbuser = config['configurations']['ranger-env']['create_db_dbuser']
-stack_is_hdp22_or_further = Script.is_hdp_stack_greater_or_equal("2.2")
-stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")
+stack_is_hdp22_or_further = Script.is_stack_greater_or_equal("2.2")
+stack_is_hdp23_or_further = Script.is_stack_greater_or_equal("2.3")
downgrade_from_version = default("/commandParams/downgrade_from_version", None)
upgrade_direction = default("/commandParams/upgrade_direction", None)
@@ -60,7 +60,7 @@ upgrade_direction = default("/commandParams/upgrade_direction", None)
ranger_conf = '/etc/ranger/admin/conf'
ranger_ugsync_conf = '/etc/ranger/usersync/conf'
-if upgrade_direction == Direction.DOWNGRADE and compare_versions(format_hdp_stack_version(version),'2.3' ) < 0:
+if upgrade_direction == Direction.DOWNGRADE and compare_versions(format_stack_version(version),'2.3' ) < 0:
stack_is_hdp22_or_further = True
stack_is_hdp23_or_further = False
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
index f145ac5..07f3ab6 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
@@ -17,7 +17,7 @@ See the License for the specific language governing permissions and
limitations under the License.
"""
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.script import Script
from resource_management.core.resources.system import Execute
from resource_management.core.exceptions import ComponentIsNotRunning
@@ -123,7 +123,7 @@ class RangerAdmin(Script):
import params
env.set_params(params)
- upgrade_stack = hdp_select._get_upgrade_stack()
+ upgrade_stack = stack_select._get_upgrade_stack()
if upgrade_stack is None:
raise Fail('Unable to determine the stack and stack version')
@@ -139,7 +139,7 @@ class RangerAdmin(Script):
import params
env.set_params(params)
- upgrade_stack = hdp_select._get_upgrade_stack()
+ upgrade_stack = stack_select._get_upgrade_stack()
if upgrade_stack is None:
raise Fail('Unable to determine the stack and stack version')
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
index 64549c3..ed8b690 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
@@ -20,7 +20,7 @@ limitations under the License.
"""
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.format import format
def prestart(env, hdp_component):
@@ -28,4 +28,4 @@ def prestart(env, hdp_component):
if params.version and params.stack_is_hdp22_or_further:
conf_select.select(params.stack_name, hdp_component, params.version)
- hdp_select.select(hdp_component, params.version)
+ stack_select.select(hdp_component, params.version)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index 95f0896..30eda0b 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -19,7 +19,7 @@ limitations under the License.
"""
import os
from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
@@ -30,9 +30,9 @@ stack_name = default("/hostLevelParams/stack_name", None)
version = default("/commandParams/version", None)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
-stack_is_hdp23_or_further = Script.is_hdp_stack_greater_or_equal("2.3")
+stack_is_hdp23_or_further = Script.is_stack_greater_or_equal("2.3")
if stack_is_hdp23_or_further:
kms_home = '/usr/hdp/current/ranger-kms'
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
index 41885bb..798e8f7 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
@@ -19,7 +19,7 @@ limitations under the License.
"""
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.format import format
def prestart(env, hdp_component):
@@ -27,4 +27,4 @@ def prestart(env, hdp_component):
if params.version and params.stack_is_hdp23_or_further:
conf_select.select(params.stack_name, hdp_component, params.version)
- hdp_select.select(hdp_component, params.version)
+ stack_select.select(hdp_component, params.version)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
index 7b6a490..cc08a6f 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
@@ -20,7 +20,7 @@ limitations under the License.
from ambari_commons.os_check import OSCheck
from resource_management.libraries.functions import format
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script
@@ -41,7 +41,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
version = default("/commandParams/version", None)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
#hadoop params
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 132ff77..b1cec11 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -18,7 +18,7 @@ limitations under the License.
"""
from resource_management.libraries.resources import HdfsResource
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
@@ -31,7 +31,7 @@ slider_home_dir = '/usr/hdp/current/slider-client'
#hadoop params
slider_bin_dir = "/usr/lib/slider/bin"
-if Script.is_hdp_stack_greater_or_equal("2.2"):
+if Script.is_stack_greater_or_equal("2.2"):
slider_bin_dir = format('{slider_home_dir}/bin')
slider_conf_dir = format("{slider_home_dir}/conf")
@@ -52,7 +52,7 @@ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hdfs_site = config['configurations']['hdfs-site']
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
index 1aed032..b93b0eb 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/service_check.py
@@ -38,7 +38,7 @@ class SliderServiceCheck(Script):
import params
env.set_params(params)
- if Script.is_hdp_stack_greater_or_equal("2.2"):
+ if Script.is_stack_greater_or_equal("2.2"):
copy_to_hdfs("slider", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
smokeuser_kinit_cmd = format(
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider.py
index b487259..f090583 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider.py
@@ -81,7 +81,7 @@ def slider():
File(format("{params.slider_conf_dir}/log4j.properties"),
mode=0644
)
- if Script.is_hdp_stack_greater_or_equal("2.2"):
+ if Script.is_stack_greater_or_equal("2.2"):
File(params.slider_tar_gz,
owner=params.hdfs_user,
group=params.user_group,
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
index 5865048..f584a12 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
@@ -20,7 +20,7 @@ limitations under the License.
from resource_management import *
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from slider import slider
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -38,15 +38,15 @@ class SliderClientLinux(SliderClient):
import params
env.set_params(params)
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
conf_select.select(params.stack_name, "slider", params.version)
- hdp_select.select("slider-client", params.version)
+ stack_select.select("slider-client", params.version)
# also set all of the hadoop clients since slider client is upgraded as
# part of the final "CLIENTS" group and we need to ensure that
# hadoop-client is also set
conf_select.select(params.stack_name, "hadoop", params.version)
- hdp_select.select("hadoop-client", params.version)
+ stack_select.select("hadoop-client", params.version)
def install(self, env):
self.install_packages(env)
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
index 24b86e3..bc1d6ab 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
@@ -23,8 +23,8 @@ import os
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.core.logger import Logger
@@ -74,15 +74,15 @@ class JobHistoryServer(Script):
import params
env.set_params(params)
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
Logger.info("Executing Spark Job History Server Stack Upgrade pre-restart")
conf_select.select(params.stack_name, "spark", params.version)
- hdp_select.select("spark-historyserver", params.version)
+ stack_select.select("spark-historyserver", params.version)
# Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
# need to copy the tarball, otherwise, copy it.
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') < 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.3.0.0') < 0:
resource_created = copy_to_hdfs(
"tez",
params.user_group,
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 7bf1f1c..843d8e7 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -25,10 +25,10 @@ from setup_spark import *
import resource_management.libraries.functions
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
+from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import format
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.get_stack_version import get_stack_version
+from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
@@ -49,7 +49,7 @@ tmp_dir = Script.get_tmp_dir()
stack_name = default("/hostLevelParams/stack_name", None)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
@@ -58,16 +58,16 @@ version = default("/commandParams/version", None)
# TODO! FIXME! Version check is not working as of today :
# $ yum list installed | grep hdp-select
# hdp-select.noarch 2.2.1.0-2340.el6 @HDP-2.2
-# And hdp_stack_version returned from hostLevelParams/stack_version is : 2.2.0.0
+# And stack_version_formatted returned from hostLevelParams/stack_version is : 2.2.0.0
# Commenting out for time being
-#stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2.1.0') >= 0
+#stack_is_hdp22_or_further = stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2.1.0') >= 0
spark_conf = '/etc/spark/conf'
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hdp_select.get_hadoop_dir("bin")
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
-if Script.is_hdp_stack_greater_or_equal("2.2"):
- hadoop_home = hdp_select.get_hadoop_dir("home")
+if Script.is_stack_greater_or_equal("2.2"):
+ hadoop_home = stack_select.get_hadoop_dir("home")
spark_conf = format("/usr/hdp/current/{component_directory}/conf")
spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
spark_pid_dir = status_params.spark_pid_dir
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index 19565e7..b585f71 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -27,7 +27,7 @@ from resource_management.core.exceptions import ComponentIsNotRunning
from resource_management.core.logger import Logger
from resource_management.core import shell
from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
def setup_spark(env, type, upgrade_type = None, action = None):
import params
@@ -99,9 +99,9 @@ def setup_spark(env, type, upgrade_type = None, action = None):
key_value_delimiter = " ",
)
- effective_version = params.version if upgrade_type is not None else params.hdp_stack_version
+ effective_version = params.version if upgrade_type is not None else params.stack_version_formatted
if effective_version:
- effective_version = format_hdp_stack_version(effective_version)
+ effective_version = format_stack_version(effective_version)
if params.spark_thrift_fairscheduler_content and effective_version and compare_versions(effective_version, '2.4.0.0') >= 0:
# create spark-thrift-fairscheduler.xml
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
index fe8cfc4..0d22908 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_client.py
@@ -21,8 +21,8 @@ limitations under the License.
import sys
from resource_management import *
from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import hdp_select
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.version import compare_versions, format_stack_version
from resource_management.core.exceptions import ComponentIsNotRunning
from resource_management.core.logger import Logger
from resource_management.core import shell
@@ -50,10 +50,10 @@ class SparkClient(Script):
import params
env.set_params(params)
- if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0:
Logger.info("Executing Spark Client Stack Upgrade pre-restart")
conf_select.select(params.stack_name, "spark", params.version)
- hdp_select.select("spark-client", params.version)
+ stack_select.select("spark-client", params.version)
if __name__ == "__main__":
SparkClient().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
index a428209..32103ae 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
@@ -25,16 +25,16 @@ from resource_management.libraries.functions.version import compare_versions
from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
from resource_management.libraries.functions import format
from resource_management.core.resources.system import File, Execute
-from resource_management.libraries.functions.version import format_hdp_stack_version
+from resource_management.libraries.functions.version import format_stack_version
def spark_service(name, upgrade_type=None, action=None):
import params
if action == 'start':
- effective_version = params.version if upgrade_type is not None else params.hdp_stack_version
+ effective_version = params.version if upgrade_type is not None else params.stack_version_formatted
if effective_version:
- effective_version = format_hdp_stack_version(effective_version)
+ effective_version = format_stack_version(effective_version)
if effective_version and compare_versions(effective_version, '2.4.0.0') >= 0:
# copy spark-hdp-assembly.jar to hdfs
@@ -56,7 +56,7 @@ def spark_service(name, upgrade_type=None, action=None):
# Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
# need to copy the tarball, otherwise, copy it.
- if params.hdp_stack_version and compare_versions(params.hdp_stack_version, '2.3.0.0') < 0:
+ if params.stack_version_formatted and compare_versions(params.stack_version_formatted, '2.3.0.0') < 0:
resource_created = copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped)
if resource_created:
params.HdfsResource(None, action="execute")