You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by al...@apache.org on 2016/03/08 06:08:23 UTC
[2/2] ambari git commit: AMBARI-15314. Introduce possibility to retry
stack installation in case of network instability (Dmytro Grinenko via
alejandro)
AMBARI-15314. Introduce possibility to retry stack installation in case of network instability (Dmytro Grinenko via alejandro)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7e81d376
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7e81d376
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7e81d376
Branch: refs/heads/trunk
Commit: 7e81d37669aefa4aea6c1ab8f1aebbb1e23768d9
Parents: 549e70e
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Mon Mar 7 20:53:55 2016 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Mon Mar 7 20:53:55 2016 -0800
----------------------------------------------------------------------
.../resource_management/TestPackageResource.py | 6 +
.../python/resource_management/TestScript.py | 12 +-
.../src/main/python/ambari_commons/str_utils.py | 18 ++
.../core/providers/package/__init__.py | 73 +++--
.../core/providers/package/apt.py | 11 +-
.../core/providers/package/yumrpm.py | 6 +-
.../core/providers/package/zypper.py | 11 +-
.../core/resources/packaging.py | 12 +-
.../libraries/script/script.py | 8 +-
ambari-server/conf/unix/ambari.properties | 5 +
.../ambari/server/agent/ExecutionCommand.java | 2 +
.../ambari/server/agent/HeartbeatMonitor.java | 2 -
.../server/configuration/Configuration.java | 270 ++++++++++---------
.../controller/AmbariActionExecutionHelper.java | 8 +-
.../AmbariCustomCommandExecutionHelper.java | 4 +
.../HBASE/0.96.0.2.0/package/scripts/hbase.py | 4 +-
.../0.96.0.2.0/package/scripts/params_linux.py | 4 +
.../HDFS/2.1.0.2.0/package/scripts/hdfs.py | 4 +-
.../2.1.0.2.0/package/scripts/params_linux.py | 3 +
.../0.12.0.2.0/package/scripts/params_linux.py | 3 +
.../package/scripts/setup_atlas_hive.py | 2 +-
.../OOZIE/4.0.0.2.0/package/scripts/oozie.py | 4 +-
.../4.0.0.2.0/package/scripts/params_linux.py | 3 +
.../custom_actions/scripts/install_packages.py | 15 +-
.../0.8/hooks/before-INSTALL/scripts/params.py | 4 +
.../scripts/shared_initialization.py | 5 +-
.../hooks/before-INSTALL/scripts/params.py | 4 +
.../scripts/shared_initialization.py | 4 +-
.../AmbariManagementControllerImplTest.java | 2 +-
...ClusterStackVersionResourceProviderTest.java | 3 +
.../custom_actions/TestInstallPackages.py | 64 ++---
.../configs/install_packages_config.json | 2 +
.../stacks/2.0.6/HBASE/test_hbase_master.py | 12 +-
.../2.0.6/HBASE/test_hbase_regionserver.py | 2 +-
.../stacks/2.0.6/configs/altfs_plus_hdfs.json | 2 +
.../stacks/2.0.6/configs/client-upgrade.json | 2 +
.../2.0.6/configs/default.hbasedecom.json | 2 +
.../python/stacks/2.0.6/configs/default.json | 5 +-
.../stacks/2.0.6/configs/default_client.json | 2 +
.../2.0.6/configs/default_hive_nn_ha.json | 2 +
.../2.0.6/configs/default_hive_nn_ha_2.json | 2 +
.../2.0.6/configs/default_hive_non_hdfs.json | 2 +
.../2.0.6/configs/default_no_install.json | 2 +
.../2.0.6/configs/default_oozie_mysql.json | 2 +
.../default_update_exclude_file_only.json | 2 +
.../2.0.6/configs/ha_bootstrap_active_node.json | 2 +
.../configs/ha_bootstrap_standby_node.json | 2 +
...ha_bootstrap_standby_node_initial_start.json | 2 +
.../python/stacks/2.0.6/configs/ha_default.json | 2 +
.../python/stacks/2.0.6/configs/ha_secured.json | 2 +
.../python/stacks/2.0.6/configs/hbase-2.2.json | 2 +
.../stacks/2.0.6/configs/hbase-check-2.2.json | 2 +
.../stacks/2.0.6/configs/hbase-preupgrade.json | 2 +
.../2.0.6/configs/hbase-rs-2.2-phoenix.json | 2 +
.../stacks/2.0.6/configs/hbase-rs-2.2.json | 2 +
.../stacks/2.0.6/configs/hbase_no_phx.json | 2 +
.../stacks/2.0.6/configs/hbase_with_phx.json | 2 +
.../test/python/stacks/2.0.6/configs/nn_eu.json | 2 +
.../stacks/2.0.6/configs/nn_eu_standby.json | 2 +
.../python/stacks/2.0.6/configs/nn_ru_lzo.json | 2 +
.../2.0.6/configs/oozie_existing_sqla.json | 2 +
.../2.0.6/configs/ranger-namenode-start.json | 2 +
.../2.0.6/configs/rebalancehdfs_default.json | 2 +
.../2.0.6/configs/rebalancehdfs_secured.json | 2 +
.../python/stacks/2.0.6/configs/secured.json | 2 +
.../stacks/2.0.6/configs/secured_client.json | 2 +
.../hooks/before-INSTALL/test_before_install.py | 4 +-
.../stacks/2.1/configs/client-upgrade.json | 2 +
.../test/python/stacks/2.1/configs/default.json | 2 +
.../2.1/configs/hive-metastore-upgrade.json | 2 +
.../test/python/stacks/2.1/configs/secured.json | 2 +
.../test/python/stacks/2.2/configs/default.json | 2 +
.../python/stacks/2.2/configs/hive-upgrade.json | 2 +
.../journalnode-upgrade-hdfs-secure.json | 2 +
.../stacks/2.2/configs/journalnode-upgrade.json | 2 +
.../stacks/2.2/configs/oozie-downgrade.json | 2 +
.../stacks/2.2/configs/oozie-upgrade.json | 2 +
.../test/python/stacks/2.2/configs/secured.json | 2 +
.../src/test/python/stacks/2.3/PXF/test_pxf.py | 20 +-
.../stacks/2.3/configs/default.hbasedecom.json | 2 +
.../test/python/stacks/2.3/configs/default.json | 2 +
.../stacks/2.3/configs/hbase_default.json | 2 +
.../python/stacks/2.3/configs/hbase_secure.json | 2 +
.../python/stacks/2.3/configs/pxf_default.json | 2 +
84 files changed, 488 insertions(+), 227 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-agent/src/test/python/resource_management/TestPackageResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestPackageResource.py b/ambari-agent/src/test/python/resource_management/TestPackageResource.py
index 1d8ef01..1f2250d 100644
--- a/ambari-agent/src/test/python/resource_management/TestPackageResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestPackageResource.py
@@ -93,6 +93,7 @@ class TestPackageResource(TestCase):
@patch.object(shell, "checked_call")
@patch.object(System, "os_family", new = 'redhat')
def test_action_install_rhel(self, shell_mock):
+ shell_mock.return_value = (0,'')
sys.modules['rpm'] = MagicMock()
sys.modules['rpm'].TransactionSet.return_value = MagicMock()
sys.modules['rpm'].TransactionSet.return_value.dbMatch.return_value = [{'name':'some_packag'}]
@@ -106,6 +107,7 @@ class TestPackageResource(TestCase):
@patch.object(shell, "checked_call")
@patch.object(System, "os_family", new = 'redhat')
def test_action_install_pattern_rhel(self, shell_mock):
+ shell_mock.return_value = (0,'')
sys.modules['rpm'] = MagicMock()
sys.modules['rpm'].TransactionSet.return_value = MagicMock()
sys.modules['rpm'].TransactionSet.return_value.dbMatch.return_value = [{'name':'some_packag'}]
@@ -118,12 +120,14 @@ class TestPackageResource(TestCase):
@patch.object(shell, "checked_call")
@patch.object(System, "os_family", new = 'redhat')
def test_action_install_pattern_installed_rhel(self, shell_mock):
+ shell_mock.return_value = (0,'')
sys.modules['yum'] = MagicMock()
sys.modules['yum'].YumBase.return_value = MagicMock()
sys.modules['yum'].YumBase.return_value.rpmdb = MagicMock()
sys.modules['yum'].YumBase.return_value.rpmdb.simplePkgList.return_value = [('some_package_1_2_3',)]
with Environment('/') as env:
Package("some_package*",
+ logoutput = False
)
self.assertEqual(shell_mock.call_count, 0, "shell.checked_call shouldn't be called")
@@ -179,6 +183,7 @@ class TestPackageResource(TestCase):
@patch.object(shell, "checked_call")
@patch.object(System, "os_family", new = 'redhat')
def test_action_install_use_repos_rhel(self, shell_mock):
+ shell_mock.return_value = (0,'')
with Environment('/') as env:
Package("some_package", use_repos=['HDP-UTILS-2.2.0.1-885', 'HDP-2.2.0.1-885'],
logoutput = False
@@ -233,6 +238,7 @@ class TestPackageResource(TestCase):
@patch.object(shell, "checked_call")
@patch.object(System, "os_family", new = 'redhat')
def test_action_install_version_attr(self, shell_mock):
+ shell_mock.return_value = (0,'')
with Environment('/') as env:
Package("some_package",
version = "3.5.0",
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-agent/src/test/python/resource_management/TestScript.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestScript.py b/ambari-agent/src/test/python/resource_management/TestScript.py
index f6a5c8c..adb8501 100644
--- a/ambari-agent/src/test/python/resource_management/TestScript.py
+++ b/ambari-agent/src/test/python/resource_management/TestScript.py
@@ -48,13 +48,17 @@ class TestScript(TestCase):
def test_install_packages(self, package_provider_mock):
no_packages_config = {
'hostLevelParams' : {
- 'repo_info' : "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
+ 'repo_info' : "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]",
+ 'agent_stack_retry_count': '5',
+ 'agent_stack_retry_on_unavailability': 'false'
}
}
empty_config = {
'hostLevelParams' : {
'package_list' : '',
- 'repo_info' : "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
+ 'repo_info' : "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]",
+ 'agent_stack_retry_count': '5',
+ 'agent_stack_retry_on_unavailability': 'false'
}
}
dummy_config = {
@@ -62,7 +66,9 @@ class TestScript(TestCase):
'package_list' : "[{\"type\":\"rpm\",\"name\":\"hbase\", \"condition\": \"\"},"
"{\"type\":\"rpm\",\"name\":\"yet-another-package\", \"condition\": \"\"}]",
'repo_info' : "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]",
- 'service_repo_info' : "[{\"mirrorsList\":\"abc\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
+ 'service_repo_info' : "[{\"mirrorsList\":\"abc\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]",
+ 'agent_stack_retry_count': '5',
+ 'agent_stack_retry_on_unavailability': 'false'
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-common/src/main/python/ambari_commons/str_utils.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/str_utils.py b/ambari-common/src/main/python/ambari_commons/str_utils.py
index 538d7c6..1a3e4f2 100644
--- a/ambari-common/src/main/python/ambari_commons/str_utils.py
+++ b/ambari-common/src/main/python/ambari_commons/str_utils.py
@@ -18,17 +18,20 @@ See the License for the specific language governing permissions and
limitations under the License.
'''
+
def compress_backslashes(s):
s1 = s
while (-1 != s1.find('\\\\')):
s1 = s1.replace('\\\\', '\\')
return s1
+
def ensure_double_backslashes(s):
s1 = compress_backslashes(s)
s2 = s1.replace('\\', '\\\\')
return s2
+
def cbool(obj):
"""
Interprets an object as a boolean value.
@@ -44,3 +47,18 @@ def cbool(obj):
raise ValueError('Unable to interpret value "%s" as boolean' % obj)
return bool(obj)
+
+def cint(obj):
+ """
+ Interprets an object as a integer value.
+ :param obj:
+ :return:
+ """
+ if isinstance(obj, str):
+ obj = obj.strip().lower()
+ try:
+ return int(obj)
+ except ValueError:
+ raise ValueError('Unable to interpret value "%s" as integer' % obj)
+ return int(obj)
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-common/src/main/python/resource_management/core/providers/package/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/providers/package/__init__.py b/ambari-common/src/main/python/resource_management/core/providers/package/__init__.py
index 1fc4214..04da9b6 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/package/__init__.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/package/__init__.py
@@ -31,7 +31,10 @@ from resource_management.core.logger import Logger
from resource_management.core.utils import suppress_stdout
from resource_management.core import shell
-PACKAGE_MANAGER_LOCK_ACQUIRED = "Package manager lock is acquired. Retrying after {0} seconds. Reason: {1}"
+
+PACKAGE_MANAGER_LOCK_ACQUIRED_MSG = "Cannot obtain lock for Package manager. Retrying after {0} seconds. Reason: {1}"
+PACKAGE_MANAGER_REPO_ERROR_MSG = "Cannot download the package due to repository unavailability. Retrying after {0} seconds. Reason: {1}"
+
class PackageProvider(Provider):
def __init__(self, *args, **kwargs):
@@ -39,8 +42,10 @@ class PackageProvider(Provider):
def install_package(self, name, version):
raise NotImplementedError()
+
def remove_package(self, name):
raise NotImplementedError()
+
def upgrade_package(self, name, version):
raise NotImplementedError()
@@ -61,41 +66,63 @@ class PackageProvider(Provider):
return self.resource.package_name + '-' + self.resource.version
else:
return self.resource.package_name
-
+
+ def is_locked_output(self, out):
+ return False
+
+ def is_repo_error_output(self, out):
+ return False
+
def get_logoutput(self):
return self.resource.logoutput==True and Logger.logger.isEnabledFor(logging.INFO) or self.resource.logoutput==None and Logger.logger.isEnabledFor(logging.DEBUG)
-
- def call_until_not_locked(self, cmd, **kwargs):
- return self.wait_until_not_locked(cmd, is_checked=False, **kwargs)
+
+ def call_with_retries(self, cmd, **kwargs):
+ return self._call_with_retries(cmd, is_checked=False, **kwargs)
- def checked_call_until_not_locked(self, cmd, **kwargs):
- return self.wait_until_not_locked(cmd, is_checked=True, **kwargs)
-
- def wait_until_not_locked(self, cmd, is_checked=True, **kwargs):
+ def checked_call_with_retries(self, cmd, **kwargs):
+ return self._call_with_retries(cmd, is_checked=True, **kwargs)
+
+ def _call_with_retries(self, cmd, is_checked=True, **kwargs):
func = shell.checked_call if is_checked else shell.call
-
- for i in range(self.resource.locked_tries):
- is_last_time = (i == self.resource.locked_tries - 1)
+
+ for i in range(self.resource.retry_count):
+ is_last_time = (i == self.resource.retry_count - 1)
try:
code, out = func(cmd, **kwargs)
except Fail as ex:
# non-lock error
- if not self.is_locked_output(str(ex)) or is_last_time:
+ if not self._is_handled_error(str(ex), is_last_time) or is_last_time:
raise
-
- Logger.info(PACKAGE_MANAGER_LOCK_ACQUIRED.format(self.resource.locked_try_sleep, str(ex)))
+
+ self._notify_about_handled_error(str(ex), is_last_time)
else:
# didn't fail or failed with non-lock error.
- if not code or not self.is_locked_output(out):
- break
-
- Logger.info(PACKAGE_MANAGER_LOCK_ACQUIRED.format(self.resource.locked_try_sleep, str(out)))
-
- time.sleep(self.resource.locked_try_sleep)
+ if not code or not self._is_handled_error(out, is_last_time):
+ break
+
+ self._notify_about_handled_error(str(out), is_last_time)
+
+ time.sleep(self.resource.retry_sleep)
return code, out
-
-
+
+ def _is_handled_error(self, output, is_last_time):
+ if self.resource.retry_on_locked and self.is_locked_output(output):
+ return True
+ elif self.resource.retry_on_repo_unavailability and self.is_repo_error_output(output):
+ return True
+
+ return False
+
+ def _notify_about_handled_error(self, output, is_last_time):
+ if is_last_time:
+ return
+
+ if self.resource.retry_on_locked and self.is_locked_output(output):
+ Logger.info(PACKAGE_MANAGER_LOCK_ACQUIRED_MSG.format(self.resource.retry_sleep, str(output)))
+ elif self.resource.retry_on_repo_unavailability and self.is_repo_error_output(output):
+ Logger.info(PACKAGE_MANAGER_REPO_ERROR_MSG.format(self.resource.retry_sleep, str(output)))
+
def yum_check_package_available(self, name):
"""
Does the same as rpm_check_package_avaiable, but faster.
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-common/src/main/python/resource_management/core/providers/package/apt.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/providers/package/apt.py b/ambari-common/src/main/python/resource_management/core/providers/package/apt.py
index ea8ad98..476e39b 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/package/apt.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/package/apt.py
@@ -78,7 +78,7 @@ class AptProvider(PackageProvider):
cmd = cmd + [name]
Logger.info("Installing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
- code, out = self.call_until_not_locked(cmd, sudo=True, env=INSTALL_CMD_ENV, logoutput=self.get_logoutput())
+ code, out = self.call_with_retries(cmd, sudo=True, env=INSTALL_CMD_ENV, logoutput=self.get_logoutput())
if self.is_locked_output(out):
err_msg = Logger.filter_text("Execution of '%s' returned %d. %s" % (cmd, code, out))
@@ -88,13 +88,13 @@ class AptProvider(PackageProvider):
if code:
Logger.info("Execution of '%s' returned %d. %s" % (cmd, code, out))
Logger.info("Failed to install package %s. Executing `%s`" % (name, string_cmd_from_args_list(REPO_UPDATE_CMD)))
- code, out = self.call_until_not_locked(REPO_UPDATE_CMD, sudo=True, logoutput=self.get_logoutput())
+ code, out = self.call_with_retries(REPO_UPDATE_CMD, sudo=True, logoutput=self.get_logoutput())
if code:
Logger.info("Execution of '%s' returned %d. %s" % (REPO_UPDATE_CMD, code, out))
Logger.info("Retrying to install package %s" % (name))
- self.checked_call_until_not_locked(cmd, sudo=True, env=INSTALL_CMD_ENV, logoutput=self.get_logoutput())
+ self.checked_call_with_retries(cmd, sudo=True, env=INSTALL_CMD_ENV, logoutput=self.get_logoutput())
if is_tmp_dir_created:
for temporal_sources_file in copied_sources_files:
@@ -108,6 +108,9 @@ class AptProvider(PackageProvider):
def is_locked_output(self, out):
return "Unable to lock the administration directory" in out
+ def is_repo_error_output(self, out):
+ return "Failure when receiving data from the peer" in out
+
@replace_underscores
def upgrade_package(self, name, use_repos=[], skip_repos=[], is_upgrade=True):
return self.install_package(name, use_repos, skip_repos, is_upgrade)
@@ -117,7 +120,7 @@ class AptProvider(PackageProvider):
if self._check_existence(name):
cmd = REMOVE_CMD[self.get_logoutput()] + [name]
Logger.info("Removing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
- self.checked_call_until_not_locked(cmd, sudo=True, logoutput=self.get_logoutput())
+ self.checked_call_with_retries(cmd, sudo=True, logoutput=self.get_logoutput())
else:
Logger.info("Skipping removal of non-existing package %s" % (name))
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py b/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
index ea86395..0739f66 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
@@ -46,7 +46,7 @@ class YumProvider(PackageProvider):
cmd = cmd + [disable_repo_option, enable_repo_option]
cmd = cmd + [name]
Logger.info("Installing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
- shell.checked_call(cmd, sudo=True, logoutput=self.get_logoutput())
+ self.checked_call_with_retries(cmd, sudo=True, logoutput=self.get_logoutput())
else:
Logger.info("Skipping installation of existing package %s" % (name))
@@ -61,6 +61,10 @@ class YumProvider(PackageProvider):
else:
Logger.info("Skipping removal of non-existing package %s" % (name))
+ def is_repo_error_output(self, out):
+ return "Failure when receiving data from the peer" in out or \
+ "No more mirrors to try" in out
+
def _check_existence(self, name):
"""
For regexp names:
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-common/src/main/python/resource_management/core/providers/package/zypper.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/providers/package/zypper.py b/ambari-common/src/main/python/resource_management/core/providers/package/zypper.py
index d0f3198..2d00b0d 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/package/zypper.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/package/zypper.py
@@ -54,7 +54,7 @@ class ZypperProvider(PackageProvider):
cmd = cmd + [name]
Logger.info("Installing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
- self.checked_call_until_not_locked(cmd, sudo=True, logoutput=self.get_logoutput())
+ self.checked_call_with_retries(cmd, sudo=True, logoutput=self.get_logoutput())
else:
Logger.info("Skipping installation of existing package %s" % (name))
@@ -65,12 +65,12 @@ class ZypperProvider(PackageProvider):
if self._check_existence(name):
cmd = REMOVE_CMD[self.get_logoutput()] + [name]
Logger.info("Removing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))
- self.checked_call_until_not_locked(cmd, sudo=True, logoutput=self.get_logoutput())
+ self.checked_call_with_retries(cmd, sudo=True, logoutput=self.get_logoutput())
else:
Logger.info("Skipping removal of non-existing package %s" % (name))
def get_active_base_repos(self):
- (code, output) = self.call_until_not_locked(LIST_ACTIVE_REPOS_CMD)
+ (code, output) = self.call_with_retries(LIST_ACTIVE_REPOS_CMD)
enabled_repos = []
if not code:
for line in output.split('\n')[2:]:
@@ -81,9 +81,12 @@ class ZypperProvider(PackageProvider):
return [line_list[1].strip()]
return enabled_repos
- def is_locked_output(self ,out):
+ def is_locked_output(self, out):
return "System management is locked by the application" in out
+ def is_repo_error_output(self, out):
+ return "Failure when receiving data from the peer" in out
+
def _check_existence(self, name):
"""
For regexp names:
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-common/src/main/python/resource_management/core/resources/packaging.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/resources/packaging.py b/ambari-common/src/main/python/resource_management/core/resources/packaging.py
index bb0aa56..e3adc30 100644
--- a/ambari-common/src/main/python/resource_management/core/resources/packaging.py
+++ b/ambari-common/src/main/python/resource_management/core/resources/packaging.py
@@ -22,7 +22,7 @@ Ambari Agent
__all__ = ["Package"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
class Package(Resource):
@@ -41,11 +41,13 @@ class Package(Resource):
logoutput = ResourceArgument(default=None)
"""
- Retry if package manager is locked. (usually another process is running).
- Note that this works only for apt-get and zypper, while yum manages lock retries itself.
+ Retry if package manager is locked or unavailable.
+ Note that retry_on_lock works only for apt-get and zypper, while yum manages lock retries itself.
"""
- locked_tries = ResourceArgument(default=8)
- locked_try_sleep = ResourceArgument(default=30) # seconds
+ retry_count = ResourceArgument(default=4)
+ retry_sleep = ResourceArgument(default=30)
+ retry_on_repo_unavailability = BooleanArgument(default=False)
+ retry_on_locked = BooleanArgument(default=True)
version = ResourceArgument()
actions = ["install", "upgrade", "remove"]
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index 3bef342..5e76562 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -432,6 +432,7 @@ class Script(object):
NOTE: regexes don't have Python syntax, but simple package regexes which support only * and .* and ?
"""
config = self.get_config()
+
if 'host_sys_prepped' in config['hostLevelParams']:
# do not install anything on sys-prepped host
if config['hostLevelParams']['host_sys_prepped'] == True:
@@ -440,6 +441,9 @@ class Script(object):
pass
try:
package_list_str = config['hostLevelParams']['package_list']
+ agent_stack_retry_on_unavailability = bool(config['hostLevelParams']['agent_stack_retry_on_unavailability'])
+ agent_stack_retry_count = int(config['hostLevelParams']['agent_stack_retry_count'])
+
if isinstance(package_list_str, basestring) and len(package_list_str) > 0:
package_list = json.loads(package_list_str)
for package in package_list:
@@ -452,7 +456,9 @@ class Script(object):
if "ambari-metrics" in name:
Package(name)
else:
- Package(name)
+ Package(name,
+ retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
+ retry_count=agent_stack_retry_count)
except KeyError:
pass # No reason to worry
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/conf/unix/ambari.properties
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/ambari.properties b/ambari-server/conf/unix/ambari.properties
index ba5090c..92dec24 100644
--- a/ambari-server/conf/unix/ambari.properties
+++ b/ambari-server/conf/unix/ambari.properties
@@ -75,6 +75,11 @@ agent.task.timeout=900
# Default timeout in seconds before package installation task is killed
agent.package.install.task.timeout=1800
+# Enables package installation retry on repository unavailability error
+agent.stack.retry.on_repo_unavailability=false
+# Default count of tries
+agent.stack.retry.tries=5
+
# Default timeout in seconds before a server-side task is killed
server.task.timeout=1200
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index a540a36..788e9c3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -371,6 +371,8 @@ public class ExecutionCommand extends AgentCommand {
String HOST_SYS_PREPPED = "host_sys_prepped";
String MAX_DURATION_OF_RETRIES = "max_duration_for_retries";
String COMMAND_RETRY_ENABLED = "command_retry_enabled";
+ String AGENT_STACK_RETRY_ON_UNAVAILABILITY = "agent_stack_retry_on_unavailability";
+ String AGENT_STACK_RETRY_COUNT = "agent_stack_retry_count";
/**
* Comma separated list of config-types whose tags have be refreshed
* at runtime before being executed. If all config-type tags have to be
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index 378e123..a902a2c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -33,7 +33,6 @@ import java.util.List;
import java.util.Map;
import java.util.TreeMap;
-import com.google.inject.Inject;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.actionmanager.ActionManager;
@@ -341,7 +340,6 @@ public class HeartbeatMonitor implements Runnable {
hostLevelParams.put(STACK_NAME, stackId.getStackName());
hostLevelParams.put(STACK_VERSION, stackId.getStackVersion());
-
if (statusCmd.getPayloadLevel() == StatusCommand.StatusCommandPayload.EXECUTION_COMMAND) {
ExecutionCommand ec = ambariManagementController.getExecutionCommand(cluster, sch, RoleCommand.START);
statusCmd.setExecutionCommand(ec);
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 17fb42d..92d4f47 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -591,8 +591,16 @@ public class Configuration {
*/
private static final int VERSION_DEFINITION_READ_TIMEOUT_DEFAULT = 5000;
+ /**
+ * For Agent Stack Install retry configuration
+ */
+ public static final String AGENT_STACK_RETRY_ON_REPO_UNAVAILABILITY_KEY = "agent.stack.retry.on_repo_unavailability";
+ public static final String AGENT_STACK_RETRY_ON_REPO_UNAVAILABILITY_DEFAULT = "false";
+ public static final String AGENT_STACK_RETRY_COUNT_KEY = "agent.stack.retry.tries";
+ public static final String AGENT_STACK_RETRY_COUNT_DEFAULT = "5";
+
private static final Logger LOG = LoggerFactory.getLogger(
- Configuration.class);
+ Configuration.class);
private Properties properties;
private JsonObject hostChangesJson;
@@ -625,7 +633,7 @@ public class Configuration {
SQL_ANYWHERE("sqlanywhere");
private static final Map<String, DatabaseType> m_mappedTypes =
- new HashMap<String, Configuration.DatabaseType>(5);
+ new HashMap<String, Configuration.DatabaseType>(5);
static {
for (DatabaseType databaseType : EnumSet.allOf(DatabaseType.class)) {
@@ -711,59 +719,59 @@ public class Configuration {
CHECK_MOUNTS_TIMEOUT_KEY, CHECK_MOUNTS_TIMEOUT_DEFAULT));
agentConfigsMap.put(ENABLE_AUTO_AGENT_CACHE_UPDATE_KEY, properties.getProperty(
- ENABLE_AUTO_AGENT_CACHE_UPDATE_KEY, ENABLE_AUTO_AGENT_CACHE_UPDATE_DEFAULT));
+ ENABLE_AUTO_AGENT_CACHE_UPDATE_KEY, ENABLE_AUTO_AGENT_CACHE_UPDATE_DEFAULT));
configsMap = new HashMap<String, String>();
configsMap.putAll(agentConfigsMap);
configsMap.put(AMBARI_PYTHON_WRAP_KEY, properties.getProperty(
- AMBARI_PYTHON_WRAP_KEY, AMBARI_PYTHON_WRAP_DEFAULT));
+ AMBARI_PYTHON_WRAP_KEY, AMBARI_PYTHON_WRAP_DEFAULT));
configsMap.put(SRVR_TWO_WAY_SSL_KEY, properties.getProperty(
- SRVR_TWO_WAY_SSL_KEY, SRVR_TWO_WAY_SSL_DEFAULT));
+ SRVR_TWO_WAY_SSL_KEY, SRVR_TWO_WAY_SSL_DEFAULT));
configsMap.put(SRVR_TWO_WAY_SSL_PORT_KEY, properties.getProperty(
- SRVR_TWO_WAY_SSL_PORT_KEY, SRVR_TWO_WAY_SSL_PORT_DEFAULT));
+ SRVR_TWO_WAY_SSL_PORT_KEY, SRVR_TWO_WAY_SSL_PORT_DEFAULT));
configsMap.put(SRVR_ONE_WAY_SSL_PORT_KEY, properties.getProperty(
- SRVR_ONE_WAY_SSL_PORT_KEY, SRVR_ONE_WAY_SSL_PORT_DEFAULT));
+ SRVR_ONE_WAY_SSL_PORT_KEY, SRVR_ONE_WAY_SSL_PORT_DEFAULT));
configsMap.put(SRVR_KSTR_DIR_KEY, properties.getProperty(
- SRVR_KSTR_DIR_KEY, SRVR_KSTR_DIR_DEFAULT));
+ SRVR_KSTR_DIR_KEY, SRVR_KSTR_DIR_DEFAULT));
configsMap.put(SRVR_CRT_NAME_KEY, properties.getProperty(
- SRVR_CRT_NAME_KEY, SRVR_CRT_NAME_DEFAULT));
+ SRVR_CRT_NAME_KEY, SRVR_CRT_NAME_DEFAULT));
configsMap.put(SRVR_KEY_NAME_KEY, properties.getProperty(
SRVR_KEY_NAME_KEY, SRVR_KEY_NAME_DEFAULT));
configsMap.put(SRVR_CSR_NAME_KEY, properties.getProperty(
SRVR_CSR_NAME_KEY, SRVR_CSR_NAME_DEFAULT));
configsMap.put(KSTR_NAME_KEY, properties.getProperty(
- KSTR_NAME_KEY, KSTR_NAME_DEFAULT));
+ KSTR_NAME_KEY, KSTR_NAME_DEFAULT));
configsMap.put(KSTR_TYPE_KEY, properties.getProperty(
- KSTR_TYPE_KEY, KSTR_TYPE_DEFAULT));
+ KSTR_TYPE_KEY, KSTR_TYPE_DEFAULT));
configsMap.put(TSTR_NAME_KEY, properties.getProperty(
- TSTR_NAME_KEY, TSTR_NAME_DEFAULT));
+ TSTR_NAME_KEY, TSTR_NAME_DEFAULT));
configsMap.put(TSTR_TYPE_KEY, properties.getProperty(
- TSTR_TYPE_KEY, TSTR_TYPE_DEFAULT));
+ TSTR_TYPE_KEY, TSTR_TYPE_DEFAULT));
configsMap.put(SRVR_CRT_PASS_FILE_KEY, properties.getProperty(
- SRVR_CRT_PASS_FILE_KEY, SRVR_CRT_PASS_FILE_DEFAULT));
+ SRVR_CRT_PASS_FILE_KEY, SRVR_CRT_PASS_FILE_DEFAULT));
configsMap.put(PASSPHRASE_ENV_KEY, properties.getProperty(
- PASSPHRASE_ENV_KEY, PASSPHRASE_ENV_DEFAULT));
+ PASSPHRASE_ENV_KEY, PASSPHRASE_ENV_DEFAULT));
configsMap.put(PASSPHRASE_KEY, System.getenv(configsMap.get(
- PASSPHRASE_ENV_KEY)));
+ PASSPHRASE_ENV_KEY)));
configsMap.put(RESOURCES_DIR_KEY, properties.getProperty(
- RESOURCES_DIR_KEY, RESOURCES_DIR_DEFAULT));
+ RESOURCES_DIR_KEY, RESOURCES_DIR_DEFAULT));
configsMap.put(SRVR_CRT_PASS_LEN_KEY, properties.getProperty(
- SRVR_CRT_PASS_LEN_KEY, SRVR_CRT_PASS_LEN_DEFAULT));
+ SRVR_CRT_PASS_LEN_KEY, SRVR_CRT_PASS_LEN_DEFAULT));
configsMap.put(SRVR_DISABLED_CIPHERS, properties.getProperty(
- SRVR_DISABLED_CIPHERS, SRVR_DISABLED_CIPHERS_DEFAULT));
+ SRVR_DISABLED_CIPHERS, SRVR_DISABLED_CIPHERS_DEFAULT));
configsMap.put(SRVR_DISABLED_PROTOCOLS, properties.getProperty(
- SRVR_DISABLED_PROTOCOLS, SRVR_DISABLED_PROTOCOLS_DEFAULT));
+ SRVR_DISABLED_PROTOCOLS, SRVR_DISABLED_PROTOCOLS_DEFAULT));
configsMap.put(CLIENT_API_SSL_KSTR_DIR_NAME_KEY, properties.getProperty(
CLIENT_API_SSL_KSTR_DIR_NAME_KEY, configsMap.get(SRVR_KSTR_DIR_KEY)));
configsMap.put(CLIENT_API_SSL_KSTR_NAME_KEY, properties.getProperty(
CLIENT_API_SSL_KSTR_NAME_KEY, CLIENT_API_SSL_KSTR_NAME_DEFAULT));
configsMap.put(CLIENT_API_SSL_KSTR_TYPE_KEY, properties.getProperty(
- CLIENT_API_SSL_KSTR_TYPE_KEY, CLIENT_API_SSL_KSTR_TYPE_DEFAULT));
+ CLIENT_API_SSL_KSTR_TYPE_KEY, CLIENT_API_SSL_KSTR_TYPE_DEFAULT));
configsMap.put(CLIENT_API_SSL_TSTR_NAME_KEY, properties.getProperty(
- CLIENT_API_SSL_TSTR_NAME_KEY, CLIENT_API_SSL_TSTR_NAME_DEFAULT));
+ CLIENT_API_SSL_TSTR_NAME_KEY, CLIENT_API_SSL_TSTR_NAME_DEFAULT));
configsMap.put(CLIENT_API_SSL_TSTR_TYPE_KEY, properties.getProperty(
- CLIENT_API_SSL_TSTR_TYPE_KEY, CLIENT_API_SSL_TSTR_TYPE_DEFAULT));
+ CLIENT_API_SSL_TSTR_TYPE_KEY, CLIENT_API_SSL_TSTR_TYPE_DEFAULT));
configsMap.put(CLIENT_API_SSL_CRT_PASS_FILE_NAME_KEY, properties.getProperty(
CLIENT_API_SSL_CRT_PASS_FILE_NAME_KEY, CLIENT_API_SSL_CRT_PASS_FILE_NAME_DEFAULT));
configsMap.put(CLIENT_API_SSL_KEY_NAME_KEY, properties.getProperty(
@@ -771,41 +779,41 @@ public class Configuration {
configsMap.put(CLIENT_API_SSL_CRT_NAME_KEY, properties.getProperty(
CLIENT_API_SSL_CRT_NAME_KEY, CLIENT_API_SSL_CRT_NAME_DEFAULT));
configsMap.put(JAVA_HOME_KEY, properties.getProperty(
- JAVA_HOME_KEY));
+ JAVA_HOME_KEY));
configsMap.put(PARALLEL_STAGE_EXECUTION_KEY, properties.getProperty(
- PARALLEL_STAGE_EXECUTION_KEY, PARALLEL_STAGE_EXECUTION_DEFAULT));
+ PARALLEL_STAGE_EXECUTION_KEY, PARALLEL_STAGE_EXECUTION_DEFAULT));
configsMap.put(SERVER_TMP_DIR_KEY, properties.getProperty(
- SERVER_TMP_DIR_KEY, SERVER_TMP_DIR_DEFAULT));
+ SERVER_TMP_DIR_KEY, SERVER_TMP_DIR_DEFAULT));
configsMap.put(EXTERNAL_SCRIPT_TIMEOUT_KEY, properties.getProperty(
- EXTERNAL_SCRIPT_TIMEOUT_KEY, EXTERNAL_SCRIPT_TIMEOUT_DEFAULT));
+ EXTERNAL_SCRIPT_TIMEOUT_KEY, EXTERNAL_SCRIPT_TIMEOUT_DEFAULT));
configsMap.put(SHARED_RESOURCES_DIR_KEY, properties.getProperty(
- SHARED_RESOURCES_DIR_KEY, SHARED_RESOURCES_DIR_DEFAULT));
+ SHARED_RESOURCES_DIR_KEY, SHARED_RESOURCES_DIR_DEFAULT));
configsMap.put(KDC_PORT_KEY, properties.getProperty(
- KDC_PORT_KEY, KDC_PORT_KEY_DEFAULT));
+ KDC_PORT_KEY, KDC_PORT_KEY_DEFAULT));
configsMap.put(AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_KEY, properties.getProperty(
- AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_KEY, AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_DEFAULT));
+ AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_KEY, AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_DEFAULT));
configsMap.put(PROXY_ALLOWED_HOST_PORTS, properties.getProperty(
- PROXY_ALLOWED_HOST_PORTS, PROXY_ALLOWED_HOST_PORTS_DEFAULT));
+ PROXY_ALLOWED_HOST_PORTS, PROXY_ALLOWED_HOST_PORTS_DEFAULT));
File passFile = new File(configsMap.get(SRVR_KSTR_DIR_KEY) + File.separator
- + configsMap.get(SRVR_CRT_PASS_FILE_KEY));
+ + configsMap.get(SRVR_CRT_PASS_FILE_KEY));
String password = null;
if (!passFile.exists()) {
LOG.info("Generation of file with password");
try {
password = RandomStringUtils.randomAlphanumeric(Integer
- .parseInt(configsMap.get(SRVR_CRT_PASS_LEN_KEY)));
+ .parseInt(configsMap.get(SRVR_CRT_PASS_LEN_KEY)));
FileUtils.writeStringToFile(passFile, password);
ShellCommandUtil.setUnixFilePermissions(
- ShellCommandUtil.MASK_OWNER_ONLY_RW, passFile.getAbsolutePath());
+ ShellCommandUtil.MASK_OWNER_ONLY_RW, passFile.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
throw new RuntimeException(
- "Error reading certificate password from file");
+ "Error reading certificate password from file");
}
} else {
LOG.info("Reading password from existing file");
@@ -884,12 +892,12 @@ public class Configuration {
}
if (properties.getProperty(SSL_TRUSTSTORE_PASSWORD_KEY) != null) {
String ts_password = readPasswordFromStore(
- properties.getProperty(SSL_TRUSTSTORE_PASSWORD_KEY));
+ properties.getProperty(SSL_TRUSTSTORE_PASSWORD_KEY));
if (ts_password != null) {
System.setProperty(JAVAX_SSL_TRUSTSTORE_PASSWORD, ts_password);
} else {
System.setProperty(JAVAX_SSL_TRUSTSTORE_PASSWORD,
- properties.getProperty(SSL_TRUSTSTORE_PASSWORD_KEY));
+ properties.getProperty(SSL_TRUSTSTORE_PASSWORD_KEY));
}
}
if (properties.getProperty(SSL_TRUSTSTORE_TYPE_KEY) != null) {
@@ -901,9 +909,9 @@ public class Configuration {
if (!credentialProviderInitialized) {
try {
credentialProvider = new CredentialProvider(null,
- getMasterKeyLocation(),
- isMasterKeyPersisted(),
- getMasterKeyStoreLocation());
+ getMasterKeyLocation(),
+ isMasterKeyPersisted(),
+ getMasterKeyStoreLocation());
} catch (Exception e) {
LOG.info("Credential provider creation failed. Reason: " + e.getMessage());
if (LOG.isDebugEnabled()) {
@@ -937,7 +945,7 @@ public class Configuration {
LOG.info("No configuration file " + CONFIG_FILE + " found in classpath.", fnf);
} catch (IOException ie) {
throw new IllegalArgumentException("Can't read configuration file " +
- CONFIG_FILE, ie);
+ CONFIG_FILE, ie);
}
return properties;
@@ -1023,7 +1031,7 @@ public class Configuration {
public String getBootSetupAgentScript() {
return properties.getProperty(BOOTSTRAP_SETUP_AGENT_SCRIPT,
- AmbariPath.getPath("/usr/lib/python2.6/site-packages/ambari_server/setupAgent.py"));
+ AmbariPath.getPath("/usr/lib/python2.6/site-packages/ambari_server/setupAgent.py"));
}
public String getBootSetupAgentPassword() {
@@ -1056,7 +1064,7 @@ public class Configuration {
*/
public List<String> getRollingUpgradeSkipPackagesPrefixes() {
String propertyValue = properties.getProperty(ROLLING_UPGRADE_SKIP_PACKAGES_PREFIXES_KEY,
- ROLLING_UPGRADE_SKIP_PACKAGES_PREFIXES_DEFAULT);
+ ROLLING_UPGRADE_SKIP_PACKAGES_PREFIXES_DEFAULT);
ArrayList<String> res = new ArrayList<>();
for (String prefix : propertyValue.split(",")) {
if (! prefix.isEmpty()) {
@@ -1114,7 +1122,7 @@ public class Configuration {
}
public void setLdap(String host, String userClass, String userNameAttr, String groupClass, String groupName, String groupMember,
- String baseDN, boolean anon, String managerDN, String managerPass) {
+ String baseDN, boolean anon, String managerDN, String managerPass) {
properties.setProperty(LDAP_PRIMARY_URL_KEY, host);
properties.setProperty(LDAP_USER_OBJECT_CLASS_KEY, userClass);
properties.setProperty(LDAP_USERNAME_ATTRIBUTE_KEY, userNameAttr);
@@ -1192,7 +1200,7 @@ public class Configuration {
*/
public int getClientSSLApiPort() {
return Integer.parseInt(properties.getProperty(CLIENT_API_SSL_PORT_KEY,
- String.valueOf(CLIENT_API_SSL_PORT_DEFAULT)));
+ String.valueOf(CLIENT_API_SSL_PORT_DEFAULT)));
}
/**
@@ -1322,8 +1330,8 @@ public class Configuration {
*/
public boolean isApiGzipped() {
return "true".equalsIgnoreCase(properties.getProperty(
- API_GZIP_COMPRESSION_ENABLED_KEY,
- API_GZIP_COMPRESSION_ENABLED_DEFAULT));
+ API_GZIP_COMPRESSION_ENABLED_KEY,
+ API_GZIP_COMPRESSION_ENABLED_DEFAULT));
}
/**
@@ -1474,16 +1482,16 @@ public class Configuration {
LdapServerProperties ldapServerProperties = new LdapServerProperties();
ldapServerProperties.setPrimaryUrl(properties.getProperty(
- LDAP_PRIMARY_URL_KEY, LDAP_PRIMARY_URL_DEFAULT));
+ LDAP_PRIMARY_URL_KEY, LDAP_PRIMARY_URL_DEFAULT));
ldapServerProperties.setSecondaryUrl(properties.getProperty(
- LDAP_SECONDARY_URL_KEY));
+ LDAP_SECONDARY_URL_KEY));
ldapServerProperties.setUseSsl("true".equalsIgnoreCase(properties.
- getProperty(LDAP_USE_SSL_KEY)));
+ getProperty(LDAP_USE_SSL_KEY)));
ldapServerProperties.setAnonymousBind("true".
- equalsIgnoreCase(properties.getProperty(LDAP_BIND_ANONYMOUSLY_KEY,
- LDAP_BIND_ANONYMOUSLY_DEFAULT)));
+ equalsIgnoreCase(properties.getProperty(LDAP_BIND_ANONYMOUSLY_KEY,
+ LDAP_BIND_ANONYMOUSLY_DEFAULT)));
ldapServerProperties.setManagerDn(properties.getProperty(
- LDAP_MANAGER_DN_KEY));
+ LDAP_MANAGER_DN_KEY));
String ldapPasswordProperty = properties.getProperty(LDAP_MANAGER_PASSWORD_KEY);
String ldapPassword = null;
if (CredentialProvider.isAliasString(ldapPasswordProperty)) {
@@ -1497,9 +1505,9 @@ public class Configuration {
}
}
ldapServerProperties.setBaseDN(properties.getProperty
- (LDAP_BASE_DN_KEY, LDAP_BASE_DN_DEFAULT));
+ (LDAP_BASE_DN_KEY, LDAP_BASE_DN_DEFAULT));
ldapServerProperties.setUsernameAttribute(properties.
- getProperty(LDAP_USERNAME_ATTRIBUTE_KEY, LDAP_USERNAME_ATTRIBUTE_DEFAULT));
+ getProperty(LDAP_USERNAME_ATTRIBUTE_KEY, LDAP_USERNAME_ATTRIBUTE_DEFAULT));
ldapServerProperties.setUserBase(properties.getProperty(
LDAP_USER_BASE_KEY, LDAP_USER_BASE_DEFAULT));
@@ -1509,28 +1517,28 @@ public class Configuration {
LDAP_DN_ATTRIBUTE_KEY, LDAP_DN_ATTRIBUTE_DEFAULT));
ldapServerProperties.setGroupBase(properties.
- getProperty(LDAP_GROUP_BASE_KEY, LDAP_GROUP_BASE_DEFAULT));
+ getProperty(LDAP_GROUP_BASE_KEY, LDAP_GROUP_BASE_DEFAULT));
ldapServerProperties.setGroupObjectClass(properties.
- getProperty(LDAP_GROUP_OBJECT_CLASS_KEY, LDAP_GROUP_OBJECT_CLASS_DEFAULT));
+ getProperty(LDAP_GROUP_OBJECT_CLASS_KEY, LDAP_GROUP_OBJECT_CLASS_DEFAULT));
ldapServerProperties.setGroupMembershipAttr(properties.getProperty(
- LDAP_GROUP_MEMEBERSHIP_ATTR_KEY, LDAP_GROUP_MEMBERSHIP_ATTR_DEFAULT));
+ LDAP_GROUP_MEMEBERSHIP_ATTR_KEY, LDAP_GROUP_MEMBERSHIP_ATTR_DEFAULT));
ldapServerProperties.setGroupNamingAttr(properties.
- getProperty(LDAP_GROUP_NAMING_ATTR_KEY, LDAP_GROUP_NAMING_ATTR_DEFAULT));
+ getProperty(LDAP_GROUP_NAMING_ATTR_KEY, LDAP_GROUP_NAMING_ATTR_DEFAULT));
ldapServerProperties.setAdminGroupMappingRules(properties.getProperty(
- LDAP_ADMIN_GROUP_MAPPING_RULES_KEY, LDAP_ADMIN_GROUP_MAPPING_RULES_DEFAULT));
+ LDAP_ADMIN_GROUP_MAPPING_RULES_KEY, LDAP_ADMIN_GROUP_MAPPING_RULES_DEFAULT));
ldapServerProperties.setGroupSearchFilter(properties.getProperty(
- LDAP_GROUP_SEARCH_FILTER_KEY, LDAP_GROUP_SEARCH_FILTER_DEFAULT));
+ LDAP_GROUP_SEARCH_FILTER_KEY, LDAP_GROUP_SEARCH_FILTER_DEFAULT));
ldapServerProperties.setReferralMethod(properties.getProperty(
LDAP_REFERRAL_KEY, LDAP_REFERRAL_DEFAULT));
ldapServerProperties.setPaginationEnabled("true".equalsIgnoreCase(
properties.getProperty(LDAP_PAGINATION_ENABLED_KEY, LDAP_PAGINATION_ENABLED_DEFAULT)));
if (properties.containsKey(LDAP_GROUP_BASE_KEY) ||
- properties.containsKey(LDAP_GROUP_OBJECT_CLASS_KEY) ||
- properties.containsKey(LDAP_GROUP_MEMEBERSHIP_ATTR_KEY) ||
- properties.containsKey(LDAP_GROUP_NAMING_ATTR_KEY) ||
- properties.containsKey(LDAP_ADMIN_GROUP_MAPPING_RULES_KEY) ||
- properties.containsKey(LDAP_GROUP_SEARCH_FILTER_KEY)) {
+ properties.containsKey(LDAP_GROUP_OBJECT_CLASS_KEY) ||
+ properties.containsKey(LDAP_GROUP_MEMEBERSHIP_ATTR_KEY) ||
+ properties.containsKey(LDAP_GROUP_NAMING_ATTR_KEY) ||
+ properties.containsKey(LDAP_ADMIN_GROUP_MAPPING_RULES_KEY) ||
+ properties.containsKey(LDAP_GROUP_SEARCH_FILTER_KEY)) {
ldapServerProperties.setGroupMappingEnabled(true);
}
@@ -1558,7 +1566,7 @@ public class Configuration {
}
public String getOjdbcJarName() {
- return properties.getProperty(OJDBC_JAR_NAME_KEY, OJDBC_JAR_NAME_DEFAULT);
+ return properties.getProperty(OJDBC_JAR_NAME_KEY, OJDBC_JAR_NAME_DEFAULT);
}
public String getJavaHome() {
@@ -1574,11 +1582,11 @@ public class Configuration {
}
public String getServerDBName() {
- return properties.getProperty(SERVER_DB_NAME_KEY, SERVER_DB_NAME_DEFAULT);
+ return properties.getProperty(SERVER_DB_NAME_KEY, SERVER_DB_NAME_DEFAULT);
}
public String getMySQLJarName() {
- return properties.getProperty(MYSQL_JAR_NAME_KEY, MYSQL_JAR_NAME_DEFAULT);
+ return properties.getProperty(MYSQL_JAR_NAME_KEY, MYSQL_JAR_NAME_DEFAULT);
}
public JPATableGenerationStrategy getJPATableGenerationStrategy() {
@@ -1606,8 +1614,8 @@ public class Configuration {
public File getServerKeyStoreDirectory() {
String path = properties.getProperty(SRVR_KSTR_DIR_KEY, SRVR_KSTR_DIR_DEFAULT);
return ((path == null) || path.isEmpty())
- ? new File(".")
- : new File(path);
+ ? new File(".")
+ : new File(path);
}
/**
@@ -1684,7 +1692,7 @@ public class Configuration {
if(StringUtils.isEmpty(value)) {
LOG.debug("Value of {} is not set, using default value ({})",
- TEMPORARY_KEYSTORE_RETENTION_MINUTES, TEMPORARY_KEYSTORE_RETENTION_MINUTES_DEFAULT);
+ TEMPORARY_KEYSTORE_RETENTION_MINUTES, TEMPORARY_KEYSTORE_RETENTION_MINUTES_DEFAULT);
minutes = TEMPORARY_KEYSTORE_RETENTION_MINUTES_DEFAULT;
}
else {
@@ -1693,7 +1701,7 @@ public class Configuration {
LOG.debug("Value of {} is {}", TEMPORARY_KEYSTORE_RETENTION_MINUTES, value);
} catch (NumberFormatException e) {
LOG.warn("Value of {} ({}) should be a number, falling back to default value ({})",
- TEMPORARY_KEYSTORE_RETENTION_MINUTES, value, TEMPORARY_KEYSTORE_RETENTION_MINUTES_DEFAULT);
+ TEMPORARY_KEYSTORE_RETENTION_MINUTES, value, TEMPORARY_KEYSTORE_RETENTION_MINUTES_DEFAULT);
minutes = TEMPORARY_KEYSTORE_RETENTION_MINUTES_DEFAULT;
}
}
@@ -1715,7 +1723,7 @@ public class Configuration {
if (StringUtils.isEmpty(value)) {
LOG.debug("Value of {} is not set, using default value ({})",
- TEMPORARY_KEYSTORE_ACTIVELY_PURGE, TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT);
+ TEMPORARY_KEYSTORE_ACTIVELY_PURGE, TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT);
return TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT;
} else if ("true".equalsIgnoreCase(value)) {
LOG.debug("Value of {} is {}", TEMPORARY_KEYSTORE_ACTIVELY_PURGE, value);
@@ -1725,22 +1733,22 @@ public class Configuration {
return false;
} else {
LOG.warn("Value of {} should be either \"true\" or \"false\" but is \"{}\", falling back to default value ({})",
- TEMPORARY_KEYSTORE_ACTIVELY_PURGE, value, TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT);
+ TEMPORARY_KEYSTORE_ACTIVELY_PURGE, value, TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT);
return TEMPORARY_KEYSTORE_ACTIVELY_PURGE_DEFAULT;
}
}
public String getSrvrDisabledCiphers() {
String disabledCiphers = properties.getProperty(SRVR_DISABLED_CIPHERS,
- properties.getProperty(SRVR_DISABLED_CIPHERS,
- SRVR_DISABLED_CIPHERS_DEFAULT));
+ properties.getProperty(SRVR_DISABLED_CIPHERS,
+ SRVR_DISABLED_CIPHERS_DEFAULT));
return disabledCiphers.trim();
}
public String getSrvrDisabledProtocols() {
String disabledProtocols = properties.getProperty(SRVR_DISABLED_PROTOCOLS,
- properties.getProperty(SRVR_DISABLED_PROTOCOLS,
- SRVR_DISABLED_PROTOCOLS_DEFAULT));
+ properties.getProperty(SRVR_DISABLED_PROTOCOLS,
+ SRVR_DISABLED_PROTOCOLS_DEFAULT));
return disabledProtocols.trim();
}
@@ -1751,7 +1759,7 @@ public class Configuration {
public int getTwoWayAuthPort() {
return Integer.parseInt(properties.getProperty(SRVR_TWO_WAY_SSL_PORT_KEY,
- String.valueOf(SRVR_TWO_WAY_SSL_PORT_DEFAULT)));
+ String.valueOf(SRVR_TWO_WAY_SSL_PORT_DEFAULT)));
}
/**
@@ -1887,10 +1895,10 @@ public class Configuration {
if(osFamily.isUbuntuFamily(osType)) {
repoSuffixes = properties.getProperty(REPO_SUFFIX_KEY_UBUNTU,
- REPO_SUFFIX_UBUNTU);
+ REPO_SUFFIX_UBUNTU);
} else {
repoSuffixes = properties.getProperty(REPO_SUFFIX_KEY_DEFAULT,
- REPO_SUFFIX_DEFAULT);
+ REPO_SUFFIX_DEFAULT);
}
return repoSuffixes.split(",");
@@ -1903,7 +1911,7 @@ public class Configuration {
public String getExecutionSchedulerThreads() {
return properties.getProperty(EXECUTION_SCHEDULER_THREADS_KEY,
- DEFAULT_SCHEDULER_THREAD_COUNT);
+ DEFAULT_SCHEDULER_THREAD_COUNT);
}
public Integer getRequestReadTimeout() {
@@ -1913,7 +1921,7 @@ public class Configuration {
public Integer getRequestConnectTimeout() {
return Integer.parseInt(properties.getProperty(REQUEST_CONNECT_TIMEOUT,
- REQUEST_CONNECT_TIMEOUT_DEFAULT));
+ REQUEST_CONNECT_TIMEOUT_DEFAULT));
}
public String getExecutionSchedulerConnections() {
@@ -1930,7 +1938,7 @@ public class Configuration {
public Integer getExecutionSchedulerStartDelay() {
String delay = properties.getProperty(EXECUTION_SCHEDULER_START_DELAY_KEY,
- DEFAULT_SCHEDULER_START_DELAY_SECONDS);
+ DEFAULT_SCHEDULER_START_DELAY_SECONDS);
return Integer.parseInt(delay);
}
@@ -1944,7 +1952,7 @@ public class Configuration {
sleepTime = Long.valueOf(stringValue);
} catch (NumberFormatException ignored) {
LOG.warn("Value of {} ({}) should be a number, " +
- "falling back to default value ({})", EXECUTION_SCHEDULER_WAIT_KEY,
+ "falling back to default value ({})", EXECUTION_SCHEDULER_WAIT_KEY,
stringValue, DEFAULT_EXECUTION_SCHEDULER_WAIT_SECONDS);
}
@@ -1969,13 +1977,13 @@ public class Configuration {
public String getCustomActionDefinitionPath() {
return properties.getProperty(CUSTOM_ACTION_DEFINITION_KEY,
- CUSTOM_ACTION_DEFINITION_DEF_VALUE);
+ CUSTOM_ACTION_DEFINITION_DEF_VALUE);
}
public int getAgentPackageParallelCommandsLimit() {
int value = Integer.parseInt(properties.getProperty(
- AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_KEY,
- AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_DEFAULT));
+ AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_KEY,
+ AGENT_PACKAGE_PARALLEL_COMMANDS_LIMIT_DEFAULT));
if (value < 1) {
value = 1;
}
@@ -1996,7 +2004,7 @@ public class Configuration {
} else {
LOG.warn(String.format("Value of %s (%s) should be a number, " +
"falling back to default value (%s)",
- key, value, defaultValue));
+ key, value, defaultValue));
return defaultValue;
}
}
@@ -2010,7 +2018,7 @@ public class Configuration {
return Integer.parseInt(value);
} else {
LOG.warn("Value of {} ({}) should be a number, falling back to default value ({})",
- SERVER_TASK_TIMEOUT_KEY, value, SERVER_TASK_TIMEOUT_DEFAULT);
+ SERVER_TASK_TIMEOUT_KEY, value, SERVER_TASK_TIMEOUT_DEFAULT);
return Integer.parseInt(SERVER_TASK_TIMEOUT_DEFAULT);
}
}
@@ -2020,7 +2028,7 @@ public class Configuration {
}
public String getSharedResourcesDirPath(){
- return properties.getProperty(SHARED_RESOURCES_DIR_KEY, SHARED_RESOURCES_DIR_DEFAULT);
+ return properties.getProperty(SHARED_RESOURCES_DIR_KEY, SHARED_RESOURCES_DIR_DEFAULT);
}
public String getServerJDBCPostgresSchemaName() {
@@ -2032,7 +2040,7 @@ public class Configuration {
*/
public int getClientThreadPoolSize() {
return Integer.parseInt(properties.getProperty(
- CLIENT_THREADPOOL_SIZE_KEY, String.valueOf(CLIENT_THREADPOOL_SIZE_DEFAULT)));
+ CLIENT_THREADPOOL_SIZE_KEY, String.valueOf(CLIENT_THREADPOOL_SIZE_DEFAULT)));
}
/**
@@ -2040,7 +2048,7 @@ public class Configuration {
*/
public int getAgentThreadPoolSize() {
return Integer.parseInt(properties.getProperty(
- AGENT_THREADPOOL_SIZE_KEY, String.valueOf(AGENT_THREADPOOL_SIZE_DEFAULT)));
+ AGENT_THREADPOOL_SIZE_KEY, String.valueOf(AGENT_THREADPOOL_SIZE_DEFAULT)));
}
/**
@@ -2050,7 +2058,7 @@ public class Configuration {
*/
public int getViewExtractionThreadPoolMaxSize() {
return Integer.parseInt(properties.getProperty(
- VIEW_EXTRACTION_THREADPOOL_MAX_SIZE_KEY, String.valueOf(VIEW_EXTRACTION_THREADPOOL_MAX_SIZE_DEFAULT)));
+ VIEW_EXTRACTION_THREADPOOL_MAX_SIZE_KEY, String.valueOf(VIEW_EXTRACTION_THREADPOOL_MAX_SIZE_DEFAULT)));
}
/**
@@ -2060,7 +2068,7 @@ public class Configuration {
*/
public int getViewExtractionThreadPoolCoreSize() {
return Integer.parseInt(properties.getProperty(
- VIEW_EXTRACTION_THREADPOOL_CORE_SIZE_KEY, String.valueOf(VIEW_EXTRACTION_THREADPOOL_CORE_SIZE_DEFAULT)));
+ VIEW_EXTRACTION_THREADPOOL_CORE_SIZE_KEY, String.valueOf(VIEW_EXTRACTION_THREADPOOL_CORE_SIZE_DEFAULT)));
}
/**
@@ -2070,7 +2078,7 @@ public class Configuration {
*/
public int getPropertyProvidersThreadPoolCoreSize() {
return Integer.parseInt(properties.getProperty(PROPERTY_PROVIDER_THREADPOOL_CORE_SIZE_KEY,
- String.valueOf(PROPERTY_PROVIDER_THREADPOOL_CORE_SIZE_DEFAULT)));
+ String.valueOf(PROPERTY_PROVIDER_THREADPOOL_CORE_SIZE_DEFAULT)));
}
/**
@@ -2080,7 +2088,7 @@ public class Configuration {
*/
public int getPropertyProvidersThreadPoolMaxSize() {
return Integer.parseInt(properties.getProperty(PROPERTY_PROVIDER_THREADPOOL_MAX_SIZE_KEY,
- String.valueOf(PROPERTY_PROVIDER_THREADPOOL_MAX_SIZE_DEFAULT)));
+ String.valueOf(PROPERTY_PROVIDER_THREADPOOL_MAX_SIZE_DEFAULT)));
}
/**
@@ -2090,7 +2098,7 @@ public class Configuration {
*/
public long getViewExtractionThreadPoolTimeout() {
return Long.parseLong(properties.getProperty(
- VIEW_EXTRACTION_THREADPOOL_TIMEOUT_KEY, String.valueOf(VIEW_EXTRACTION_THREADPOOL_TIMEOUT_DEFAULT)));
+ VIEW_EXTRACTION_THREADPOOL_TIMEOUT_KEY, String.valueOf(VIEW_EXTRACTION_THREADPOOL_TIMEOUT_DEFAULT)));
}
/**
@@ -2251,8 +2259,8 @@ public class Configuration {
databaseType = DatabaseType.SQL_ANYWHERE;
} else {
throw new RuntimeException(
- "The database type could be not determined from the JDBC URL "
- + dbUrl);
+ "The database type could be not determined from the JDBC URL "
+ + dbUrl);
}
return databaseType;
@@ -2289,7 +2297,7 @@ public class Configuration {
*/
public ConnectionPoolType getConnectionPoolType(){
String connectionPoolType = properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL, ConnectionPoolType.INTERNAL.getName());
+ SERVER_JDBC_CONNECTION_POOL, ConnectionPoolType.INTERNAL.getName());
if (connectionPoolType.equals(ConnectionPoolType.C3P0.getName())) {
return ConnectionPoolType.C3P0;
@@ -2306,7 +2314,7 @@ public class Configuration {
*/
public int getConnectionPoolMinimumSize() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_MIN_SIZE, DEFAULT_JDBC_POOL_MIN_CONNECTIONS));
+ SERVER_JDBC_CONNECTION_POOL_MIN_SIZE, DEFAULT_JDBC_POOL_MIN_CONNECTIONS));
}
/**
@@ -2317,7 +2325,7 @@ public class Configuration {
*/
public int getConnectionPoolMaximumSize() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_MAX_SIZE, DEFAULT_JDBC_POOL_MAX_CONNECTIONS));
+ SERVER_JDBC_CONNECTION_POOL_MAX_SIZE, DEFAULT_JDBC_POOL_MAX_CONNECTIONS));
}
/**
@@ -2329,7 +2337,7 @@ public class Configuration {
*/
public int getConnectionPoolMaximumAge() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_MAX_AGE, DEFAULT_JDBC_POOL_MAX_AGE_SECONDS));
+ SERVER_JDBC_CONNECTION_POOL_MAX_AGE, DEFAULT_JDBC_POOL_MAX_AGE_SECONDS));
}
/**
@@ -2341,8 +2349,8 @@ public class Configuration {
*/
public int getConnectionPoolMaximumIdle() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_MAX_IDLE_TIME,
- DEFAULT_JDBC_POOL_MAX_IDLE_TIME_SECONDS));
+ SERVER_JDBC_CONNECTION_POOL_MAX_IDLE_TIME,
+ DEFAULT_JDBC_POOL_MAX_IDLE_TIME_SECONDS));
}
/**
@@ -2354,8 +2362,8 @@ public class Configuration {
*/
public int getConnectionPoolMaximumExcessIdle() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_MAX_IDLE_TIME_EXCESS,
- DEFAULT_JDBC_POOL_EXCESS_MAX_IDLE_TIME_SECONDS));
+ SERVER_JDBC_CONNECTION_POOL_MAX_IDLE_TIME_EXCESS,
+ DEFAULT_JDBC_POOL_EXCESS_MAX_IDLE_TIME_SECONDS));
}
/**
@@ -2367,8 +2375,8 @@ public class Configuration {
*/
public int getConnectionPoolAcquisitionSize() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_AQUISITION_SIZE,
- DEFAULT_JDBC_POOL_ACQUISITION_SIZE));
+ SERVER_JDBC_CONNECTION_POOL_AQUISITION_SIZE,
+ DEFAULT_JDBC_POOL_ACQUISITION_SIZE));
}
/**
@@ -2379,8 +2387,8 @@ public class Configuration {
*/
public int getConnectionPoolAcquisitionRetryAttempts() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_ACQUISITION_RETRY_ATTEMPTS,
- DEFAULT_JDBC_POOL_ACQUISITION_RETRY_ATTEMPTS));
+ SERVER_JDBC_CONNECTION_POOL_ACQUISITION_RETRY_ATTEMPTS,
+ DEFAULT_JDBC_POOL_ACQUISITION_RETRY_ATTEMPTS));
}
/**
@@ -2390,8 +2398,8 @@ public class Configuration {
*/
public int getConnectionPoolAcquisitionRetryDelay() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_ACQUISITION_RETRY_DELAY,
- DEFAULT_JDBC_POOL_ACQUISITION_RETRY_DELAY));
+ SERVER_JDBC_CONNECTION_POOL_ACQUISITION_RETRY_DELAY,
+ DEFAULT_JDBC_POOL_ACQUISITION_RETRY_DELAY));
}
@@ -2403,8 +2411,8 @@ public class Configuration {
*/
public int getConnectionPoolIdleTestInternval() {
return Integer.parseInt(properties.getProperty(
- SERVER_JDBC_CONNECTION_POOL_IDLE_TEST_INTERVAL,
- DEFAULT_JDBC_POOL_IDLE_TEST_INTERVAL));
+ SERVER_JDBC_CONNECTION_POOL_IDLE_TEST_INTERVAL,
+ DEFAULT_JDBC_POOL_IDLE_TEST_INTERVAL));
}
/**
@@ -2446,7 +2454,7 @@ public class Configuration {
*/
public int getMetricCacheIdleSeconds() {
return Integer.parseInt(properties.getProperty(TIMELINE_METRICS_CACHE_IDLE_TIME,
- DEFAULT_TIMELINE_METRICS_CACHE_IDLE_TIME));
+ DEFAULT_TIMELINE_METRICS_CACHE_IDLE_TIME));
}
/**
@@ -2577,7 +2585,7 @@ public class Configuration {
@Experimental(feature = ExperimentalFeature.PARALLEL_PROCESSING)
public boolean isExperimentalConcurrentStageProcessingEnabled() {
return Boolean.parseBoolean(properties.getProperty(
- EXPERIMENTAL_CONCURRENCY_STAGE_PROCESSING_ENABLED, Boolean.FALSE.toString()));
+ EXPERIMENTAL_CONCURRENCY_STAGE_PROCESSING_ENABLED, Boolean.FALSE.toString()));
}
/**
@@ -2593,7 +2601,7 @@ public class Configuration {
@Experimental(feature = ExperimentalFeature.ALERT_CACHING)
public boolean isAlertCacheEnabled() {
return Boolean.parseBoolean(
- properties.getProperty(ALERTS_CACHE_ENABLED, Boolean.FALSE.toString()));
+ properties.getProperty(ALERTS_CACHE_ENABLED, Boolean.FALSE.toString()));
}
/**
@@ -2606,7 +2614,7 @@ public class Configuration {
@Experimental(feature = ExperimentalFeature.ALERT_CACHING)
public int getAlertCacheFlushInterval() {
return Integer.parseInt(
- properties.getProperty(ALERTS_CACHE_FLUSH_INTERVAL, ALERTS_CACHE_FLUSH_INTERVAL_DEFAULT));
+ properties.getProperty(ALERTS_CACHE_FLUSH_INTERVAL, ALERTS_CACHE_FLUSH_INTERVAL_DEFAULT));
}
/**
@@ -2637,11 +2645,11 @@ public class Configuration {
Integer attempts = Integer.valueOf(property);
if (attempts < 0) {
LOG.warn("Invalid operations retry attempts number ({}), should be [0,{}]. Value reset to default {}",
- attempts, RETRY_ATTEMPTS_LIMIT, OPERATIONS_RETRY_ATTEMPTS_DEFAULT);
+ attempts, RETRY_ATTEMPTS_LIMIT, OPERATIONS_RETRY_ATTEMPTS_DEFAULT);
attempts = Integer.valueOf(OPERATIONS_RETRY_ATTEMPTS_DEFAULT);
} else if (attempts > RETRY_ATTEMPTS_LIMIT) {
LOG.warn("Invalid operations retry attempts number ({}), should be [0,{}]. Value set to {}",
- attempts, RETRY_ATTEMPTS_LIMIT, RETRY_ATTEMPTS_LIMIT);
+ attempts, RETRY_ATTEMPTS_LIMIT, RETRY_ATTEMPTS_LIMIT);
attempts = RETRY_ATTEMPTS_LIMIT;
}
if (attempts > 0) {
@@ -2655,15 +2663,23 @@ public class Configuration {
*/
public int getVersionDefinitionConnectTimeout() {
return NumberUtils.toInt(
- properties.getProperty(VERSION_DEFINITION_CONNECT_TIMEOUT),
- VERSION_DEFINITION_CONNECT_TIMEOUT_DEFAULT);
+ properties.getProperty(VERSION_DEFINITION_CONNECT_TIMEOUT),
+ VERSION_DEFINITION_CONNECT_TIMEOUT_DEFAULT);
}
/**
* @return the read timeout used when loading a version definition URL
*/
public int getVersionDefinitionReadTimeout() {
return NumberUtils.toInt(
- properties.getProperty(VERSION_DEFINITION_READ_TIMEOUT),
- VERSION_DEFINITION_READ_TIMEOUT_DEFAULT);
+ properties.getProperty(VERSION_DEFINITION_READ_TIMEOUT),
+ VERSION_DEFINITION_READ_TIMEOUT_DEFAULT);
+ }
+
+ public String getAgentStackRetryOnInstallCount(){
+ return properties.getProperty(AGENT_STACK_RETRY_COUNT_KEY, AGENT_STACK_RETRY_COUNT_DEFAULT);
+ }
+
+ public String isAgentStackRetryOnInstallEnabled(){
+ return properties.getProperty(AGENT_STACK_RETRY_ON_REPO_UNAVAILABILITY_KEY, AGENT_STACK_RETRY_ON_REPO_UNAVAILABILITY_DEFAULT);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
index 88180c0..7a7bc21 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
@@ -18,6 +18,8 @@
package org.apache.ambari.server.controller;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_ON_UNAVAILABILITY;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_COUNT;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
@@ -58,7 +60,6 @@ import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
import org.apache.ambari.server.utils.SecretReference;
-import org.apache.ambari.server.utils.StageUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -414,7 +415,10 @@ public class AmbariActionExecutionHelper {
execCmd.setComponentName(componentName == null || componentName.isEmpty() ?
resourceFilter.getComponentName() : componentName);
- addRepoInfoToHostLevelParams(cluster, execCmd.getHostLevelParams(), hostName);
+ Map<String, String> hostLevelParams = execCmd.getHostLevelParams();
+ hostLevelParams.put(AGENT_STACK_RETRY_ON_UNAVAILABILITY, configs.isAgentStackRetryOnInstallEnabled());
+ hostLevelParams.put(AGENT_STACK_RETRY_COUNT, configs.getAgentStackRetryOnInstallCount());
+ addRepoInfoToHostLevelParams(cluster, hostLevelParams, hostName);
Map<String, String> roleParams = execCmd.getRoleParams();
if (roleParams == null) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 24728bf..a94c6b4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -18,6 +18,8 @@
package org.apache.ambari.server.controller;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_ON_UNAVAILABILITY;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AGENT_STACK_RETRY_COUNT;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CLIENTS_TO_UPDATE_CONFIGS;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
@@ -1149,6 +1151,8 @@ public class AmbariCustomCommandExecutionHelper {
hostLevelParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
hostLevelParams.putAll(managementController.getRcaParameters());
hostLevelParams.put(HOST_SYS_PREPPED, configs.areHostsSysPrepped());
+ hostLevelParams.put(AGENT_STACK_RETRY_ON_UNAVAILABILITY, configs.isAgentStackRetryOnInstallEnabled());
+ hostLevelParams.put(AGENT_STACK_RETRY_COUNT, configs.getAgentStackRetryOnInstallCount());
ClusterVersionEntity clusterVersionEntity = clusterVersionDAO.findByClusterAndStateCurrent(cluster.getClusterName());
if (clusterVersionEntity == null) {
List<ClusterVersionEntity> clusterVersionEntityList = clusterVersionDAO
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
index 1240e7c..ee19ee4 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
@@ -191,7 +191,9 @@ def hbase(name=None):
params.HdfsResource(None, action="execute")
if params.phoenix_enabled:
- Package(params.phoenix_package)
+ Package(params.phoenix_package,
+ retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
+ retry_count=params.agent_stack_retry_count)
def hbase_TemplateConfig(name, tag=None):
import params
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 03486a7..f9694c6 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -24,6 +24,7 @@ from functions import calc_xmn_from_xms, ensure_unit_for_memory
from ambari_commons.constants import AMBARI_SUDO_BINARY
from ambari_commons.os_check import OSCheck
+from ambari_commons.str_utils import cbool, cint
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions import conf_select
@@ -42,6 +43,9 @@ exec_tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY
stack_name = default("/hostLevelParams/stack_name", None)
+agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_stack_retry_on_unavailability"])
+agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])
+
version = default("/commandParams/version", None)
component_directory = status_params.component_directory
etc_prefix_dir = "/etc/hbase"
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index 246bf07..1264284 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -117,7 +117,9 @@ def hdfs(name=None):
)
if params.lzo_enabled and len(params.lzo_packages) > 0:
- Package(params.lzo_packages)
+ Package(params.lzo_packages,
+ retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
+ retry_count=params.agent_stack_retry_count)
def install_snappy():
import params
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 3fb4486..5242694 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -24,6 +24,7 @@ import os
import re
from ambari_commons.os_check import OSCheck
+from ambari_commons.str_utils import cbool, cint
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import hdp_select
@@ -47,6 +48,8 @@ stack_name = default("/hostLevelParams/stack_name", None)
upgrade_direction = default("/commandParams/upgrade_direction", None)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_stack_retry_on_unavailability"])
+agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])
# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
version = default("/commandParams/version", None)
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index caaa9f3..2531598 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -26,6 +26,7 @@ from urlparse import urlparse
from ambari_commons.constants import AMBARI_SUDO_BINARY
from ambari_commons.os_check import OSCheck
+from ambari_commons.str_utils import cbool, cint
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions.default import default
@@ -44,6 +45,8 @@ tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY
stack_name = default("/hostLevelParams/stack_name", None)
+agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_stack_retry_on_unavailability"])
+agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])
# node hostname
hostname = config["hostname"]
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py
index c92e3db..e78190f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/setup_atlas_hive.py
@@ -32,7 +32,7 @@ def setup_atlas_hive(configuration_directory=None):
if not params.host_sys_prepped:
Package(params.atlas_ubuntu_plugin_package if OSCheck.is_ubuntu_family() else params.atlas_plugin_package, # FIXME HACK: install the package during RESTART/START when install_packages is not triggered.
- )
+ retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability, retry_count=params.agent_stack_retry_count)
PropertiesFile(format('{configuration_directory}/client.properties'),
properties = params.atlas_client_props,
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index 2dd362a..df9ecfe 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -305,7 +305,9 @@ def oozie_server_specific():
not_if = no_op_test)
if params.lzo_enabled and len(params.all_lzo_packages) > 0:
- Package(params.all_lzo_packages)
+ Package(params.all_lzo_packages,
+ retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
+ retry_count=params.agent_stack_retry_count)
Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'),
not_if = no_op_test,
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 81c894a..072b127 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -19,6 +19,7 @@ limitations under the License.
"""
from resource_management import *
from ambari_commons.constants import AMBARI_SUDO_BINARY
+from ambari_commons.str_utils import cbool, cint
from resource_management.libraries.functions import format
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import hdp_select
@@ -46,6 +47,8 @@ hostname = config["hostname"]
version = default("/commandParams/version", None)
stack_name = default("/hostLevelParams/stack_name", None)
upgrade_direction = default("/commandParams/upgrade_direction", None)
+agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_stack_retry_on_unavailability"])
+agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index bf80616..6f8ef9f 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -31,12 +31,14 @@ from resource_management import *
import resource_management
from resource_management.libraries.functions.list_ambari_managed_repos import list_ambari_managed_repos
from ambari_commons.os_check import OSCheck, OSConst
+from ambari_commons.str_utils import cbool, cint
from resource_management.libraries.functions.packages_analyzer import allInstalledPackages
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions.hdp_select import get_hdp_versions
from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
from resource_management.libraries.functions.repo_version_history \
import read_actual_version_from_history_file, write_actual_version_to_history_file, REPO_VERSION_HISTORY_FILE
+from resource_management.core.resources.system import Execute
from resource_management.core.logger import Logger
@@ -52,7 +54,7 @@ class InstallPackages(Script):
UBUNTU_REPO_COMPONENTS_POSTFIX = ["main"]
REPO_FILE_NAME_PREFIX = 'HDP-'
STACK_TO_ROOT_FOLDER = {"HDP": "/usr/hdp"}
-
+
def actionexecute(self, env):
num_errors = 0
@@ -332,11 +334,18 @@ class InstallPackages(Script):
:return: Returns 0 if no errors were found, and 1 otherwise.
"""
ret_code = 0
+
+ config = self.get_config()
+ agent_stack_retry_on_unavailability = cbool(config['hostLevelParams']['agent_stack_retry_on_unavailability'])
+ agent_stack_retry_count = cint(config['hostLevelParams']['agent_stack_retry_count'])
+
# Install packages
packages_were_checked = False
try:
Package("hdp-select",
action="upgrade",
+ retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
+ retry_count=agent_stack_retry_count
)
packages_installed_before = []
@@ -347,7 +356,9 @@ class InstallPackages(Script):
for package in filtered_package_list:
name = self.format_package_name(package['name'])
Package(name,
- action="upgrade" # this enables upgrading non-versioned packages, despite the fact they exist. Needed by 'mahout' which is non-version but have to be updated
+ action="upgrade", # this enables upgrading non-versioned packages, despite the fact they exist. Needed by 'mahout' which is non-version but have to be updated
+ retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
+ retry_count=agent_stack_retry_count
)
except Exception, err:
ret_code = 1
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/params.py
index a687ea7..9fff7507 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/params.py
@@ -17,6 +17,7 @@ limitations under the License.
"""
+from ambari_commons.str_utils import cbool, cint
from resource_management import *
from resource_management.core.system import System
import json
@@ -28,6 +29,9 @@ tmp_dir = Script.get_tmp_dir()
#RPM versioning support
rpm_version = default("/configurations/cluster-env/rpm_version", None)
+agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_stack_retry_on_unavailability"])
+agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])
+
#users and groups
hbase_user = config['configurations']['hbase-env']['hbase_user']
smoke_user = config['configurations']['cluster-env']['smokeuser']
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/shared_initialization.py
index 03afc44..54fb8a3 100644
--- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -60,4 +60,7 @@ def install_packages():
packages = ['unzip', 'curl']
if params.rpm_version:
packages.append('bigtop-select')
- Package(packages)
+ Package(packages,
+ retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
+ retry_count=params.agent_stack_retry_count)
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/7e81d376/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
index cf20df4..226cb0f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
@@ -18,6 +18,7 @@ limitations under the License.
"""
from ambari_commons.constants import AMBARI_SUDO_BINARY
+from ambari_commons.str_utils import cbool, cint
from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
from resource_management.core.system import System
from resource_management.libraries.script.script import Script
@@ -28,6 +29,9 @@ tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+agent_stack_retry_on_unavailability = cbool(config["hostLevelParams"]["agent_stack_retry_on_unavailability"])
+agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"])
+
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
#users and groups