You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by jo...@apache.org on 2017/11/17 13:32:25 UTC
[01/11] ambari git commit: AMBARI-22444 - Add Native Libraries To Tez
Tarball (jonathanhurley)
Repository: ambari
Updated Branches:
refs/heads/branch-feature-AMBARI-22457 3df5ae74a -> 2f02bc693
AMBARI-22444 - Add Native Libraries To Tez Tarball (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6e247b3f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6e247b3f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6e247b3f
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 6e247b3f5c06bb108687e8278f474ed3d6330ade
Parents: 56079f2
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Nov 14 16:19:30 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Nov 15 14:40:33 2017 -0500
----------------------------------------------------------------------
.../libraries/functions/copy_tarball.py | 82 +++++++++++++++++---
.../libraries/functions/tar_archive.py | 6 +-
.../stacks/2.1/FALCON/test_falcon_server.py | 6 +-
3 files changed, 80 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e247b3f/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 03b6213..6d0650d 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -23,18 +23,74 @@ __all__ = ["copy_to_hdfs", "get_sysprep_skip_copy_tarballs_hdfs"]
import os
import tempfile
import re
+import tarfile
+from contextlib import closing
from resource_management.libraries.script.script import Script
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions.default import default
from resource_management.core import shell
+from resource_management.core import sudo
from resource_management.core.logger import Logger
+from resource_management.core.exceptions import Fail
+from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import stack_tools, stack_features, stack_select
+from resource_management.libraries.functions import tar_archive
STACK_NAME_PATTERN = "{{ stack_name }}"
STACK_ROOT_PATTERN = "{{ stack_root }}"
STACK_VERSION_PATTERN = "{{ stack_version }}"
+def _prepare_tez_tarball():
+ """
+ Prepares the Tez tarball by adding the Hadoop native libraries found in the mapreduce tarball.
+ :return: the full path of the newly created tez tarball to use
+ """
+ import tempfile
+
+ Logger.info("Preparing the Tez tarball...")
+
+ # get the mapreduce tarball which matches the version of tez
+ # tez installs the mapreduce tar, so it should always be present
+ _, mapreduce_source_file, _, _ = get_tarball_paths("mapreduce")
+ _, tez_source_file, _, _ = get_tarball_paths("tez")
+
+ temp_dir = Script.get_tmp_dir()
+
+ mapreduce_temp_dir = tempfile.mkdtemp(prefix="mapreduce-tarball-", dir=temp_dir)
+ tez_temp_dir = tempfile.mkdtemp(prefix="tez-tarball-", dir=temp_dir)
+
+ Logger.info("Extracting {0} to {1}".format(mapreduce_source_file, mapreduce_temp_dir))
+ tar_archive.extract_archive(mapreduce_source_file, mapreduce_temp_dir)
+
+ Logger.info("Extracting {0} to {1}".format(tez_source_file, tez_temp_dir))
+ tar_archive.untar_archive(tez_source_file, tez_temp_dir)
+
+ hadoop_lib_native_dir = os.path.join(mapreduce_temp_dir, "hadoop", "lib", "native")
+ tez_lib_dir = os.path.join(tez_temp_dir, "lib")
+
+ if not os.path.exists(hadoop_lib_native_dir):
+ raise Fail("Unable to seed the Tez tarball with native libraries since the source Hadoop native lib directory {0} does not exist".format(hadoop_lib_native_dir))
+
+ if not os.path.exists(tez_lib_dir):
+ raise Fail("Unable to seed the Tez tarball with native libraries since the target Tez lib directory {0} does not exist".format(tez_lib_dir))
+
+ Execute(("cp", "-a", hadoop_lib_native_dir, tez_lib_dir), sudo = True)
+
+ tez_tarball_with_native_lib = os.path.join(os.path.dirname(tez_source_file), "tez-native.tar.gz")
+ Logger.info("Creating a new Tez tarball at {0}".format(tez_tarball_with_native_lib))
+
+ # tar up Tez, making sure to specify nothing for the arcname so that it does not include an absolute path
+ with closing(tarfile.open(tez_tarball_with_native_lib, "w:gz")) as new_tez_tarball:
+ new_tez_tarball.add(tez_temp_dir, arcname=os.path.sep)
+
+ # cleanup
+ sudo.rmtree(mapreduce_temp_dir)
+ sudo.rmtree(tez_temp_dir)
+
+ return tez_tarball_with_native_lib
+
+
# TODO, in the future, each stack can define its own mapping of tarballs
# inside the stack definition directory in some sort of xml file.
# PLEASE DO NOT put this in cluster-env since it becomes much harder to change,
@@ -50,7 +106,8 @@ TARBALL_MAP = {
"tez": {
"dirs": ("{0}/{1}/tez/lib/tez.tar.gz".format(STACK_ROOT_PATTERN, STACK_VERSION_PATTERN),
"/{0}/apps/{1}/tez/tez.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN)),
- "service": "TEZ"
+ "service": "TEZ",
+ "prepare_function": _prepare_tez_tarball
},
"tez_hive2": {
@@ -120,29 +177,29 @@ def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_so
:param use_upgrading_version_during_upgrade:
:param custom_source_file: If specified, use this source path instead of the default one from the map.
:param custom_dest_file: If specified, use this destination path instead of the default one from the map.
- :return: A tuple of (success status, source path, destination path)
+ :return: A tuple of (success status, source path, destination path, optional preparation function which is invoked to setup the tarball)
"""
stack_name = Script.get_stack_name()
if not stack_name:
Logger.error("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name)))
- return (False, None, None)
+ return False, None, None
if name is None or name.lower() not in TARBALL_MAP:
Logger.error("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(str(name), str(stack_name)))
- return (False, None, None)
+ return False, None, None
service = TARBALL_MAP[name.lower()]['service']
stack_version = get_current_version(service=service, use_upgrading_version_during_upgrade=use_upgrading_version_during_upgrade)
if not stack_version:
Logger.error("Cannot copy {0} tarball to HDFS because stack version could not be determined.".format(str(name)))
- return (False, None, None)
+ return False, None, None
stack_root = Script.get_stack_root()
if not stack_root:
Logger.error("Cannot copy {0} tarball to HDFS because stack root could not be determined.".format(str(name)))
- return (False, None, None)
+ return False, None, None
(source_file, dest_file) = TARBALL_MAP[name.lower()]['dirs']
@@ -161,7 +218,11 @@ def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_so
source_file = source_file.replace(STACK_VERSION_PATTERN, stack_version)
dest_file = dest_file.replace(STACK_VERSION_PATTERN, stack_version)
- return (True, source_file, dest_file)
+ prepare_function = None
+ if "prepare_function" in TARBALL_MAP[name.lower()]:
+ prepare_function = TARBALL_MAP[name.lower()]['prepare_function']
+
+ return True, source_file, dest_file, prepare_function
def get_current_version(service=None, use_upgrading_version_during_upgrade=True):
@@ -258,8 +319,8 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
import params
Logger.info("Called copy_to_hdfs tarball: {0}".format(name))
- (success, source_file, dest_file) = get_tarball_paths(name, use_upgrading_version_during_upgrade,
- custom_source_file, custom_dest_file)
+ (success, source_file, dest_file, prepare_function) = get_tarball_paths(name, use_upgrading_version_during_upgrade,
+ custom_source_file, custom_dest_file)
if not success:
Logger.error("Could not copy tarball {0} due to a missing or incorrect parameter.".format(str(name)))
@@ -289,6 +350,9 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
# The logic above cannot be used until fast-hdfs-resource.jar supports the mv command, or it switches
# to WebHDFS.
+ # if there is a function which is needed to prepare the tarball, then invoke it first
+ if prepare_function is not None:
+ source_file = prepare_function()
# If the directory already exists, it is a NO-OP
dest_dir = os.path.dirname(dest_file)
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e247b3f/ambari-common/src/main/python/resource_management/libraries/functions/tar_archive.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/tar_archive.py b/ambari-common/src/main/python/resource_management/libraries/functions/tar_archive.py
index c682c3e..194520f 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/tar_archive.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/tar_archive.py
@@ -46,11 +46,13 @@ def archive_directory_dereference(archive, directory):
try_sleep = 1,
)
-def untar_archive(archive, directory):
+def untar_archive(archive, directory, silent=True):
"""
:param directory: can be a symlink and is followed
"""
- Execute(('tar','-xvf',archive,'-C',directory+"/"),
+ options = "-xf" if silent else "-xvf"
+
+ Execute(('tar',options,archive,'-C',directory+"/"),
sudo = True,
tries = 3,
try_sleep = 1,
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e247b3f/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 8c48347..e15cfdb 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -248,7 +248,7 @@ class TestFalconServer(RMFTestCase):
sudo = True,
)
self.assertResourceCalled('Execute', ('tar',
- '-xvf',
+ '-xf',
'/tmp/falcon-upgrade-backup/falcon-local-backup.tar',
'-C',
u'/hadoop/falcon/'),
@@ -433,7 +433,7 @@ class TestFalconServer(RMFTestCase):
self.assertResourceCalled('Execute',
('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'falcon-server', version), sudo=True,)
self.assertResourceCalled('Execute', ('tar',
- '-xvf',
+ '-xf',
'/tmp/falcon-upgrade-backup/falcon-local-backup.tar',
'-C',
u'/hadoop/falcon/'),
@@ -473,7 +473,7 @@ class TestFalconServer(RMFTestCase):
('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'falcon-server', version), sudo=True,)
self.assertResourceCalled('Execute', ('tar',
- '-xvf',
+ '-xf',
'/tmp/falcon-upgrade-backup/falcon-local-backup.tar',
'-C',
u'/hadoop/falcon/'),
[03/11] ambari git commit: AMBARI-22444 - Add Native Libraries To Tez
Tarball (part2) (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-22444 - Add Native Libraries To Tez Tarball (part2) (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8496eed
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8496eed
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8496eed
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: a8496eedbf55310bc37e44d0012520ed0e014737
Parents: 822fe2d
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Nov 15 17:37:57 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Nov 15 17:37:57 2017 -0500
----------------------------------------------------------------------
.../libraries/functions/copy_tarball.py | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a8496eed/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 6d0650d..1dbf1a4 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -33,6 +33,7 @@ from resource_management.core import shell
from resource_management.core import sudo
from resource_management.core.logger import Logger
from resource_management.core.exceptions import Fail
+from resource_management.core.resources.system import Directory
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions import stack_tools, stack_features, stack_select
from resource_management.libraries.functions import tar_archive
@@ -77,7 +78,14 @@ def _prepare_tez_tarball():
Execute(("cp", "-a", hadoop_lib_native_dir, tez_lib_dir), sudo = True)
- tez_tarball_with_native_lib = os.path.join(os.path.dirname(tez_source_file), "tez-native.tar.gz")
+ tez_native_tarball_staging_dir = os.path.join(temp_dir, "tez-native-tarball-staging")
+ if not os.path.exists(tez_native_tarball_staging_dir):
+ Directory(tez_native_tarball_staging_dir,
+ cd_access='a',
+ create_parents = True,
+ recursive_ownership = True)
+
+ tez_tarball_with_native_lib = os.path.join(tez_native_tarball_staging_dir, "tez-native.tar.gz")
Logger.info("Creating a new Tez tarball at {0}".format(tez_tarball_with_native_lib))
# tar up Tez, making sure to specify nothing for the arcname so that it does not include an absolute path
[08/11] ambari git commit: AMBARI-22444 - Add Native Libraries To Tez
Tarball (part4) (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-22444 - Add Native Libraries To Tez Tarball (part4) (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/72785da8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/72785da8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/72785da8
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 72785da88a1c93a12d45f4fdad815e144a1beb5f
Parents: 2951257
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Nov 16 14:11:28 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Nov 16 14:11:37 2017 -0500
----------------------------------------------------------------------
.../resource_management/libraries/functions/copy_tarball.py | 3 +++
1 file changed, 3 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/72785da8/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index dfe7c62..b05c97c 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -101,6 +101,9 @@ def _prepare_tez_tarball():
with closing(tarfile.open(tez_tarball_with_native_lib, "w:gz")) as new_tez_tarball:
new_tez_tarball.add(tez_temp_dir, arcname=os.path.sep)
+ # ensure that the tarball can be read and uploaded
+ sudo.chmod(tez_tarball_with_native_lib, 0744)
+
# cleanup
sudo.rmtree(mapreduce_temp_dir)
sudo.rmtree(tez_temp_dir)
[06/11] ambari git commit: AMBARI-22444 - Add Native Libraries To Tez
Tarball (part3) (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-22444 - Add Native Libraries To Tez Tarball (part3) (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3279bef5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3279bef5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3279bef5
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 3279bef5288cc84f89cbd1a170b78f5af276eae3
Parents: 658e76e
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Nov 16 09:43:24 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Nov 16 11:44:18 2017 -0500
----------------------------------------------------------------------
.../resource_management/libraries/functions/copy_tarball.py | 9 +++++++++
1 file changed, 9 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/3279bef5/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 1dbf1a4..dfe7c62 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -58,8 +58,11 @@ def _prepare_tez_tarball():
temp_dir = Script.get_tmp_dir()
+ # create the temp staging directories ensuring that non-root agents using tarfile can work with them
mapreduce_temp_dir = tempfile.mkdtemp(prefix="mapreduce-tarball-", dir=temp_dir)
tez_temp_dir = tempfile.mkdtemp(prefix="tez-tarball-", dir=temp_dir)
+ sudo.chmod(mapreduce_temp_dir, 0777)
+ sudo.chmod(tez_temp_dir, 0777)
Logger.info("Extracting {0} to {1}".format(mapreduce_source_file, mapreduce_temp_dir))
tar_archive.extract_archive(mapreduce_source_file, mapreduce_temp_dir)
@@ -76,11 +79,17 @@ def _prepare_tez_tarball():
if not os.path.exists(tez_lib_dir):
raise Fail("Unable to seed the Tez tarball with native libraries since the target Tez lib directory {0} does not exist".format(tez_lib_dir))
+ # ensure that the tez/lib directory is readable by non-root (which it typically is not)
+ sudo.chmod(tez_lib_dir, 0755)
+
+ # copy native libraries from hadoop to tez
Execute(("cp", "-a", hadoop_lib_native_dir, tez_lib_dir), sudo = True)
+ # create the staging directory so that non-root agents can write to it
tez_native_tarball_staging_dir = os.path.join(temp_dir, "tez-native-tarball-staging")
if not os.path.exists(tez_native_tarball_staging_dir):
Directory(tez_native_tarball_staging_dir,
+ mode = 0777,
cd_access='a',
create_parents = True,
recursive_ownership = True)
[02/11] ambari git commit: AMBARI-22445. Warn the user appropriately
for default MySQL server install for Hive (vsubramanian)
Posted by jo...@apache.org.
AMBARI-22445. Warn the user appropriately for default MySQL server install for Hive (vsubramanian)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/822fe2d5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/822fe2d5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/822fe2d5
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 822fe2d5aa02c6d7f1fafdf74b5c0e23cf657bbc
Parents: 6e247b3
Author: Vivek Ratnavel Subramanian <vi...@gmail.com>
Authored: Wed Nov 15 14:11:28 2017 -0800
Committer: Vivek Ratnavel Subramanian <vi...@gmail.com>
Committed: Wed Nov 15 14:13:06 2017 -0800
----------------------------------------------------------------------
ambari-web/app/views/common/controls_view.js | 8 +++++++-
ambari-web/test/views/common/controls_view_test.js | 2 +-
2 files changed, 8 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/822fe2d5/ambari-web/app/views/common/controls_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/controls_view.js b/ambari-web/app/views/common/controls_view.js
index f03e5c0..74c1e6e 100644
--- a/ambari-web/app/views/common/controls_view.js
+++ b/ambari-web/app/views/common/controls_view.js
@@ -519,6 +519,7 @@ App.ServiceConfigRadioButtons = Ember.View.extend(App.ServiceConfigCalculateId,
if (['addServiceController', 'installerController'].contains(this.get('controller.wizardController.name')) && !App.StackService.find(this.get('serviceConfig.serviceName')).get('isInstalled')) {
if (this.get('isNewDb') || this.get('dontUseHandleDbConnection').contains(this.get('serviceConfig.name'))) {
this.onOptionsChange();
+ this.handleDBConnectionProperty();
} else {
if ((App.get('isHadoopWindowsStack') && this.get('inMSSQLWithIA')) || this.get('serviceConfig.name') === 'DB_FLAVOR') {
this.onOptionsChange();
@@ -728,6 +729,11 @@ App.ServiceConfigRadioButtons = Ember.View.extend(App.ServiceConfigCalculateId,
// check for all db types when installing Ranger - not only for existing ones
checkDatabase = true;
}
+ // Hive specific
+ if (this.get('serviceConfig.serviceName') === 'HIVE') {
+ // check for all db types when installing Hive - not only for existing ones
+ checkDatabase = true;
+ }
if (propertyAppendTo1) {
propertyAppendTo1.set('additionalView', null);
}
@@ -737,7 +743,7 @@ App.ServiceConfigRadioButtons = Ember.View.extend(App.ServiceConfigCalculateId,
var shouldAdditionalViewsBeSet = currentDB && checkDatabase && handledProperties.contains(this.get('serviceConfig.name')),
driver = this.getDefaultPropertyValue('sql_jar_connector') ? this.getDefaultPropertyValue('sql_jar_connector').split("/").pop() : 'driver.jar',
dbType = this.getDefaultPropertyValue('db_type'),
- additionalView1 = shouldAdditionalViewsBeSet ? App.CheckDBConnectionView.extend({databaseName: dbType}) : null,
+ additionalView1 = shouldAdditionalViewsBeSet && !this.get('isNewDb') ? App.CheckDBConnectionView.extend({databaseName: dbType}) : null,
additionalView2 = shouldAdditionalViewsBeSet ? Ember.View.extend({
template: Ember.Handlebars.compile('<div class="alert">{{{view.message}}}</div>'),
message: function() {
http://git-wip-us.apache.org/repos/asf/ambari/blob/822fe2d5/ambari-web/test/views/common/controls_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/controls_view_test.js b/ambari-web/test/views/common/controls_view_test.js
index c5c5338..2d18396 100644
--- a/ambari-web/test/views/common/controls_view_test.js
+++ b/ambari-web/test/views/common/controls_view_test.js
@@ -59,7 +59,7 @@ describe('App.ServiceConfigRadioButtons', function () {
propertyAppendTo1: 'javax.jdo.option.ConnectionURL',
propertyAppendTo2: 'hive_database',
isAdditionalView1Null: true,
- isAdditionalView2Null: true,
+ isAdditionalView2Null: false,
title: 'Hive, embedded database'
},
{
[04/11] ambari git commit: AMBARI-22260. Update Spark2 log4j default
settings to latest. (sai.sai.shao via sshridhar).
Posted by jo...@apache.org.
AMBARI-22260. Update Spark2 log4j default settings to latest. (sai.sai.shao via sshridhar).
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/26cf2cf2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/26cf2cf2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/26cf2cf2
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 26cf2cf21174d15c5267c18c1e8d10da7a915957
Parents: a8496ee
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Nov 15 16:25:23 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Nov 15 16:25:23 2017 -0800
----------------------------------------------------------------------
.../stacks/HDP/2.5/upgrades/config-upgrade.xml | 23 ++++++++
.../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml | 23 +++++---
.../stacks/HDP/2.5/upgrades/upgrade-2.6.xml | 45 +++++++++-------
.../configuration/spark2-log4j-properties.xml | 56 ++++++++++++++++++++
4 files changed, 122 insertions(+), 25 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/26cf2cf2/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index d7194cc..4dd7f6f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -158,14 +158,37 @@
</service>
<service name="SPARK2">
+ <component name="SPARK2_JOBHISTORYSERVER">
+ <changes>
+ <definition xsi:type="configure" id="hdp_2_5_0_0_spark2_jobhistoryserver_log4j">
+ <type>spark2-log4j-properties</type>
+ <replace key="content" find="log4j.logger.org.eclipse.jetty=WARN" replace-with="log4j.logger.org.spark_project.jetty=WARN"/>
+ <replace key="content" find="log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR" replace-with="log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR"/>
+ </definition>
+ </changes>
+ </component>
<component name="SPARK2_CLIENT">
<changes>
+ <definition xsi:type="configure" id="hdp_2_5_0_0_spark2_client_log4j">
+ <type>spark2-log4j-properties</type>
+ <replace key="content" find="log4j.logger.org.eclipse.jetty=WARN" replace-with="log4j.logger.org.spark_project.jetty=WARN"/>
+ <replace key="content" find="log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR" replace-with="log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR"/>
+ </definition>
<definition xsi:type="configure" id="hdp_2_5_0_0_spark2_yarn_queue">
<type>spark2-defaults</type>
<set key="spark.yarn.queue" value="default" if-type="spark-defaults" if-key="spark.yarn.queue" if-key-state="absent"/>
</definition>
</changes>
</component>
+ <component name="SPARK2_THRIFTSERVER">
+ <changes>
+ <definition xsi:type="configure" id="hdp_2_5_0_0_spark2_thriftserver_log4j">
+ <type>spark2-log4j-properties</type>
+ <replace key="content" find="log4j.logger.org.eclipse.jetty=WARN" replace-with="log4j.logger.org.spark_project.jetty=WARN"/>
+ <replace key="content" find="log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR" replace-with="log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR"/>
+ </definition>
+ </changes>
+ </component>
</service>
<service name="TEZ">
http://git-wip-us.apache.org/repos/asf/ambari/blob/26cf2cf2/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index 6f2ede9..d13afcf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -77,7 +77,7 @@
<service name="FLUME">
<component>FLUME_HANDLER</component>
</service>
-
+
<service name="ACCUMULO">
<component>ACCUMULO_TRACER</component>
<component>ACCUMULO_GC</component>
@@ -348,7 +348,7 @@
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixCapacitySchedulerOrderingPolicy">
<summary>Validate Root Queue Ordering Policy</summary>
</task>
- </execute-stage>
+ </execute-stage>
<!--Yarn Apptimeline server-->
<execute-stage service="YARN" component="APP_TIMELINE_SERVER" title="Apply config changes for App timeline server">
@@ -679,6 +679,15 @@
<summary>Add queue customization property</summary>
</task>
</execute-stage>
+ <execute-stage service="SPARK2" component="SPARK2_THRIFTSERVER" title="Apply config changes for Spark2 ThriftServer log4j">
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_thriftserver_log4j"/>
+ </execute-stage>
+ <execute-stage service="SPARK2" component="SPARK2_JOBHISTORYSERVER" title="Apply config changes for Spark2 Job HistoryServer log4j">
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_jobhistoryserver_log4j"/>
+ </execute-stage>
+ <execute-stage service="SPARK2" component="SPARK2_CLIENT" title="Apply config changes for Spark2 Client log4j">
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_client_log4j"/>
+ </execute-stage>
</group>
<!--
@@ -720,7 +729,7 @@
</task>
</execute-stage>
</group>
-
+
<!-- Now, restart all of the services. -->
<group xsi:type="restart" name="ZOOKEEPER" title="ZooKeeper">
<service-check>false</service-check>
@@ -1083,7 +1092,7 @@
<component>FLUME_HANDLER</component>
</service>
</group>
-
+
<group xsi:type="restart" name="ACCUMULO" title="Accumulo">
<service-check>false</service-check>
<skippable>true</skippable>
@@ -1112,7 +1121,7 @@
<group xsi:type="cluster" name="FINALIZE_PRE_CHECK" title="Finalize {{direction.text.proper}} Pre-Check">
<direction>UPGRADE</direction>
-
+
<execute-stage title="Check Component Versions">
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.ComponentVersionCheckAction" />
</execute-stage>
@@ -1192,7 +1201,7 @@
<function>setup_ranger_java_patches</function>
</task>
</pre-upgrade>
-
+
<pre-downgrade copy-upgrade="true" />
<upgrade>
@@ -1215,7 +1224,7 @@
<function>configure_atlas_user_for_tagsync</function>
</task>
</pre-upgrade>
-
+
<pre-downgrade copy-upgrade="true" />
<upgrade>
http://git-wip-us.apache.org/repos/asf/ambari/blob/26cf2cf2/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index b908b6f..69f5eaf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -183,7 +183,7 @@
<component>HBASE_MASTER</component>
</service>
</group>
-
+
<group name="SERVICE_CHECK_1" title="All Service Checks" xsi:type="service-check">
<direction>UPGRADE</direction>
<skippable>true</skippable>
@@ -199,7 +199,7 @@
<service>LOGSEARCH</service>
</exclude>
</group>
-
+
<group name="CORE_SLAVES" title="Core Slaves" xsi:type="colocated">
<service-check>false</service-check>
<skippable>true</skippable>
@@ -207,7 +207,7 @@
<component>DATANODE</component>
<component>NFS_GATEWAY</component>
</service>
-
+
<service name="HBASE">
<component>HBASE_REGIONSERVER</component>
<component>PHOENIX_QUERY_SERVER</component>
@@ -223,7 +223,7 @@
<message>The initial batch of {{components}} hosts have been {{direction.past}}. You are advised to check the hosts and perform cluster/workload-specific tests against your cluster to ensure proper operation before proceeding with {{direction.text}} of the remaining services.</message>
</batch>
</group>
-
+
<group name="SERVICE_CHECK_2" title="All Service Checks" xsi:type="service-check">
<direction>UPGRADE</direction>
<skippable>true</skippable>
@@ -458,7 +458,7 @@
<group xsi:type="cluster" name="FINALIZE_PRE_CHECK" title="Finalize {{direction.text.proper}} Pre-Check">
<direction>UPGRADE</direction>
-
+
<execute-stage title="Check Component Versions">
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.ComponentVersionCheckAction" />
</execute-stage>
@@ -467,7 +467,7 @@
<group xsi:type="cluster" name="POST_CLUSTER" title="Finalize {{direction.text.proper}}">
<skippable>true</skippable>
<supports-auto-skip-failure>false</supports-auto-skip-failure>
-
+
<execute-stage title="Confirm Finalize">
<direction>UPGRADE</direction>
<task xsi:type="manual">
@@ -491,7 +491,7 @@
<function>finalize_rolling_upgrade</function>
</task>
</execute-stage>
-
+
<execute-stage title="Save Cluster State">
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FinalizeUpgradeAction">
</task>
@@ -577,7 +577,7 @@
<function>configure_atlas_user_for_tagsync</function>
</task>
</pre-upgrade>
-
+
<pre-downgrade copy-upgrade="true" />
<upgrade>
@@ -668,7 +668,7 @@
</upgrade>
</component>
</service>
-
+
<service name="SLIDER">
<component name="SLIDER">
<upgrade>
@@ -691,7 +691,7 @@
</task>
<task xsi:type="configure" id="hdp_2_6_0_0_mapreduce_job_queuename"/>
</pre-upgrade>
-
+
<pre-downgrade copy-upgrade="true" />
<upgrade>
@@ -722,11 +722,11 @@
<task xsi:type="configure" id="yarn_site_retained_log_count" />
<task xsi:type="configure" id="hdp_2_6_0_0_service_check_queue_name"/>
<task xsi:type="configure" id="hdp_2_6_0_0_ats_scan_interval_default"/>
-
+
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.FixCapacitySchedulerOrderingPolicy">
<summary>Validate Root Queue Ordering Policy</summary>
</task>
-
+
</pre-upgrade>
<pre-downgrade />
<upgrade>
@@ -785,9 +785,9 @@
</task>
<task xsi:type="configure" id="hdp_2_5_0_0_tez_queue_name"/>
</pre-upgrade>
-
+
<pre-downgrade copy-upgrade="true" />
-
+
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
@@ -865,9 +865,9 @@
<task xsi:type="configure" id="hdp_2_6_maint_llap_config"/>
<task xsi:type="configure" id="llap_update_tez_shuffle_ssl_enable"/>
</pre-upgrade>
-
+
<pre-downgrade />
-
+
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
@@ -913,9 +913,9 @@
<task xsi:type="configure" id="hdp_2_5_0_0_rename_spark_livy_configs" />
<task xsi:type="configure" id="hdp_2_5_0_0_add_spark_conf_dir_livy_configs" />
</pre-upgrade>
-
+
<pre-downgrade />
-
+
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
@@ -933,11 +933,19 @@
<service name="SPARK2">
<component name="SPARK2_JOBHISTORYSERVER">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_jobhistoryserver_log4j"/>
+ </pre-upgrade>
+ <pre-downgrade />
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
</component>
<component name="SPARK2_THRIFTSERVER">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_thriftserver_log4j"/>
+ </pre-upgrade>
+ <pre-downgrade />
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
@@ -945,6 +953,7 @@
<component name="SPARK2_CLIENT">
<pre-upgrade>
<task xsi:type="configure" id="hdp_2_5_0_0_spark2_yarn_queue"/>
+ <task xsi:type="configure" id="hdp_2_5_0_0_spark2_client_log4j"/>
</pre-upgrade>
<pre-downgrade/>
<upgrade>
http://git-wip-us.apache.org/repos/asf/ambari/blob/26cf2cf2/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/spark2-log4j-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/spark2-log4j-properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/spark2-log4j-properties.xml
new file mode 100644
index 0000000..d7479f4
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/configuration/spark2-log4j-properties.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+ <property>
+ <name>content</name>
+ <description>Spark2-log4j-Properties</description>
+ <value>
+# Set everything to be logged to the console
+log4j.rootCategory=INFO, console
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Set the default spark-shell log level to WARN. When running the spark-shell, the
+# log level for this class is used to overwrite the root logger's log level, so that
+# the user can have different defaults for the shell and regular Spark apps.
+log4j.logger.org.apache.spark.repl.Main=WARN
+
+# Settings to quiet third party logs that are too verbose
+log4j.logger.org.spark_project.jetty=WARN
+log4j.logger.org.spark_project.jetty.util.component.AbstractLifeCycle=ERROR
+log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO
+log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO
+log4j.logger.org.apache.parquet=ERROR
+log4j.logger.parquet=ERROR
+
+# SPARK-9183: Settings to avoid annoying messages when looking up nonexistent UDFs in SparkSQL with Hive support
+log4j.logger.org.apache.hadoop.hive.metastore.RetryingHMSHandler=FATAL
+log4j.logger.org.apache.hadoop.hive.ql.exec.FunctionRegistry=ERROR
+ </value>
+ <value-attributes>
+ <type>content</type>
+ <show-property-name>false</show-property-name>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
+</configuration>
[09/11] ambari git commit: AMBARI-22459 IOP/HDP migration NN HA
restart post Ambari upgrade fails (dili)
Posted by jo...@apache.org.
AMBARI-22459 IOP/HDP migration NN HA restart post Ambari upgrade fails (dili)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/946356a9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/946356a9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/946356a9
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 946356a9cc9cd1999c05888b1799b8e1ac703491
Parents: 72785da
Author: Di Li <di...@apache.org>
Authored: Thu Nov 16 14:51:53 2017 -0500
Committer: Di Li <di...@apache.org>
Committed: Thu Nov 16 14:51:53 2017 -0500
----------------------------------------------------------------------
.../BigInsights/4.0/properties/stack_packages.json | 12 ------------
1 file changed, 12 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/946356a9/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_packages.json b/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_packages.json
index f890d66..7a12011 100644
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_packages.json
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.0/properties/stack_packages.json
@@ -178,18 +178,6 @@
"STANDARD": [
"hadoop-hdfs-secondarynamenode"
]
- },
- "ZKFC": {
- "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
- "INSTALL": [
- "hadoop-hdfs-zkfc"
- ],
- "PATCH": [
- "hadoop-hdfs-zkfc"
- ],
- "STANDARD": [
- "hadoop-hdfs-zkfc"
- ]
}
},
"HIVE": {
[11/11] ambari git commit: Merge branch 'branch-2.6' into
branch-feature-AMBARI-22457
Posted by jo...@apache.org.
Merge branch 'branch-2.6' into branch-feature-AMBARI-22457
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2f02bc69
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2f02bc69
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2f02bc69
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 2f02bc69376aa03cd56ca8789cd1d02e902604bc
Parents: 3df5ae7 c09c69b
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Nov 17 08:30:31 2017 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Nov 17 08:30:31 2017 -0500
----------------------------------------------------------------------
.../libraries/functions/copy_tarball.py | 102 +++++-
.../libraries/functions/tar_archive.py | 6 +-
.../internal/UpgradeResourceProvider.java | 36 ++
.../upgrades/CreateAndConfigureAction.java | 164 +++++++++
.../ambari/server/state/stack/UpgradePack.java | 3 +
.../state/stack/upgrade/ClusterGrouping.java | 2 +
.../stack/upgrade/CreateAndConfigureTask.java | 57 +++
.../ambari/server/state/stack/upgrade/Task.java | 6 +-
.../HIVE/0.12.0.2.0/configuration/hive-site.xml | 2 +-
.../4.0/properties/stack_packages.json | 12 -
.../stacks/HDP/2.0.6/role_command_order.json | 1 +
.../services/HIVE/configuration/hive-site.xml | 2 +-
.../services/HIVE/configuration/hive-site.xml | 2 +-
.../stacks/HDP/2.5/upgrades/config-upgrade.xml | 23 ++
.../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml | 23 +-
.../stacks/HDP/2.5/upgrades/upgrade-2.6.xml | 45 ++-
.../configuration/spark2-log4j-properties.xml | 56 +++
.../src/main/resources/upgrade-pack.xsd | 9 +-
.../upgrades/CreateAndConfigureActionTest.java | 357 +++++++++++++++++++
.../stacks/2.1/FALCON/test_falcon_server.py | 6 +-
ambari-web/app/views/common/controls_view.js | 8 +-
.../test/views/common/controls_view_test.js | 2 +-
22 files changed, 866 insertions(+), 58 deletions(-)
----------------------------------------------------------------------
[05/11] ambari git commit: AMBARI-22460. NFSGateway start failed
(aonishuk)
Posted by jo...@apache.org.
AMBARI-22460. NFSGateway start failed (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/658e76e8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/658e76e8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/658e76e8
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 658e76e894bfa8ffd71f15e32bed8759bd5e6a1f
Parents: 26cf2cf
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Nov 16 12:26:59 2017 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Nov 16 12:26:59 2017 +0200
----------------------------------------------------------------------
.../src/main/resources/stacks/HDP/2.0.6/role_command_order.json | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/658e76e8/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
index 78a31f1..3f576bf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/role_command_order.json
@@ -14,6 +14,7 @@
"HIVE_SERVER-RESTART": ["NODEMANAGER-RESTART", "MYSQL_SERVER-RESTART", "ZOOKEEPER_SERVER-RESTART"],
"HUE_SERVER-START": ["HIVE_SERVER-START", "HCAT-START", "OOZIE_SERVER-START"],
"FLUME_HANDLER-START": ["OOZIE_SERVER-START"],
+ "NFS_GATEWAY-START": ["NAMENODE-START"],
"MAPREDUCE_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"],
"OOZIE_SERVICE_CHECK-SERVICE_CHECK": ["OOZIE_SERVER-START", "MAPREDUCE2_SERVICE_CHECK-SERVICE_CHECK"],
"HBASE_SERVICE_CHECK-SERVICE_CHECK": ["HBASE_MASTER-START", "HBASE_REGIONSERVER-START"],
[07/11] ambari git commit: AMBARI-22431 Able to add config type if
EU/RU of the same stack (minor version upgrade) (dili)
Posted by jo...@apache.org.
AMBARI-22431 Able to add config type if EU/RU of the same stack (minor version upgrade) (dili)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/29512573
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/29512573
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/29512573
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: 29512573090948e29d38bb0d259f72b0a5e42e99
Parents: 3279bef
Author: Di Li <di...@apache.org>
Authored: Thu Nov 16 12:22:35 2017 -0500
Committer: Di Li <di...@apache.org>
Committed: Thu Nov 16 12:22:35 2017 -0500
----------------------------------------------------------------------
.../internal/UpgradeResourceProvider.java | 36 ++
.../upgrades/CreateAndConfigureAction.java | 164 +++++++++
.../ambari/server/state/stack/UpgradePack.java | 3 +
.../state/stack/upgrade/ClusterGrouping.java | 2 +
.../stack/upgrade/CreateAndConfigureTask.java | 57 +++
.../ambari/server/state/stack/upgrade/Task.java | 6 +-
.../src/main/resources/upgrade-pack.xsd | 9 +-
.../upgrades/CreateAndConfigureActionTest.java | 357 +++++++++++++++++++
8 files changed, 632 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/29512573/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 66f5bf9..b6846f7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -92,6 +92,7 @@ import org.apache.ambari.server.state.UpgradeHelper.UpgradeGroupHolder;
import org.apache.ambari.server.state.stack.ConfigUpgradePack;
import org.apache.ambari.server.state.stack.UpgradePack;
import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
+import org.apache.ambari.server.state.stack.upgrade.CreateAndConfigureTask;
import org.apache.ambari.server.state.stack.upgrade.Direction;
import org.apache.ambari.server.state.stack.upgrade.ManualTask;
import org.apache.ambari.server.state.stack.upgrade.ServerSideActionTask;
@@ -1336,6 +1337,41 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
break;
}
+ case CREATE_AND_CONFIGURE: {
+ CreateAndConfigureTask ct = (CreateAndConfigureTask) task;
+
+ // !!! would prefer to do this in the sequence generator, but there's too many
+ // places to miss
+ if (context.getOrchestrationType().isRevertable() && !ct.supportsPatch) {
+ process = false;
+ }
+
+ Map<String, String> configurationChanges =
+ ct.getConfigurationChanges(cluster, configUpgradePack);
+
+ // add all configuration changes to the command params
+ commandParams.putAll(configurationChanges);
+
+ // extract the config type to build the summary
+ String configType = configurationChanges.get(CreateAndConfigureTask.PARAMETER_CONFIG_TYPE);
+ if (null != configType) {
+ itemDetail = String.format("Updating configuration %s", configType);
+ } else {
+ itemDetail = "Skipping Configuration Task "
+ + StringUtils.defaultString(ct.id, "(missing id)");
+ }
+
+ entity.setText(itemDetail);
+
+ String configureTaskSummary = ct.getSummary(configUpgradePack);
+ if (null != configureTaskSummary) {
+ stageText = configureTaskSummary;
+ } else {
+ stageText = itemDetail;
+ }
+
+ break;
+ }
default:
break;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29512573/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureAction.java
new file mode 100644
index 0000000..e60938a
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureAction.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ConfigurationRequest;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.DesiredConfig;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.UpgradeContext;
+import org.apache.ambari.server.state.stack.upgrade.CreateAndConfigureTask;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.inject.Inject;
+
+/**
+ * The {@link CreateAndConfigureAction} is used to alter a configuration property during
+ * an upgrade. It also creates the config type if it does not exist as a desired config for the cluster.
+ * It will only produce a new configuration if an actual change is
+ * occuring. For some configure tasks, the value is already at the desired
+ * property or the conditions of the task are not met. In these cases, a new
+ * configuration will not be created. This task can perform any of the following
+ * actions in a single declaration:
+ * <ul>
+ * <li>Copy a configuration to a new property key, optionally setting a default
+ * if the original property did not exist</li>
+ * <li>Copy a configuration to a new property key from one configuration type to
+ * another, optionally setting a default if the original property did not exist</li>
+ * <li>Rename a configuration, optionally setting a default if the original
+ * property did not exist</li>
+ * <li>Delete a configuration property</li>
+ * <li>Set a configuration property</li>
+ * <li>Conditionally set a configuration property based on another configuration
+ * property value</li>
+ * </ul>
+ */
+public class CreateAndConfigureAction extends ConfigureAction {
+
+ private static final Logger LOG = LoggerFactory.getLogger(CreateAndConfigureAction.class);
+
+ /**
+ * Used to lookup the cluster.
+ */
+ @Inject
+ private Clusters m_clusters;
+
+ /**
+ * Used to update the configuration properties.
+ */
+ @Inject
+ private AmbariManagementController m_controller;
+
+ /**
+ * Used to assist in the creation of a {@link ConfigurationRequest} to update
+ * configuration values.
+ */
+ @Inject
+ private ConfigHelper m_configHelper;
+
+
+ @Override
+ public CommandReport execute(
+ ConcurrentMap<String, Object> requestSharedDataContext)
+ throws AmbariException, InterruptedException {
+
+ LOG.info("Create and Configure...");
+
+ Map<String,String> commandParameters = getCommandParameters();
+ if( null == commandParameters || commandParameters.isEmpty() ){
+ return createCommandReport(0, HostRoleStatus.FAILED, "{}", "",
+ "Unable to change configuration values without command parameters");
+ }
+
+ String clusterName = commandParameters.get("clusterName");
+ Cluster cluster = m_clusters.getCluster(clusterName);
+ UpgradeContext upgradeContext = getUpgradeContext(cluster);
+
+ Direction direction = upgradeContext.getDirection();
+ if (direction == Direction.DOWNGRADE) {
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", "", "Skip changing configuration values for downgrade");
+ }
+
+ String configType = commandParameters.get(CreateAndConfigureTask.PARAMETER_CONFIG_TYPE);
+ String serviceName = cluster.getServiceByConfigType(configType);
+
+ if (StringUtils.isBlank(serviceName)) {
+ serviceName = commandParameters.get(CreateAndConfigureTask.PARAMETER_ASSOCIATED_SERVICE);
+ }
+
+ RepositoryVersionEntity sourceRepoVersion = upgradeContext.getSourceRepositoryVersion(serviceName);
+ RepositoryVersionEntity targetRepoVersion = upgradeContext.getTargetRepositoryVersion(serviceName);
+ StackId sourceStackId = sourceRepoVersion.getStackId();
+ StackId targetStackId = targetRepoVersion.getStackId();
+
+ if (!sourceStackId.equals(targetStackId)){
+ return createCommandReport(0, HostRoleStatus.FAILED, "{}", "",
+ "Unable to change configuration values across stacks. Use regular config task type instead.");
+ }
+
+ Map<String, DesiredConfig> desiredConfigs = cluster.getDesiredConfigs();
+ DesiredConfig desiredConfig = desiredConfigs.get(configType);
+ if (desiredConfig == null) {
+ LOG.info(String.format("Could not find desired config type with name %s. Create it with default values.", configType));
+
+ // populate a map with default configurations from the new stack
+ Map<String, Map<String, String>> newServiceDefaultConfigsByType = m_configHelper.getDefaultProperties(
+ targetStackId, serviceName);
+
+ if (!newServiceDefaultConfigsByType.containsKey(configType)){
+ String error = String.format("%s in %s does not contain configuration type %s", serviceName, targetStackId.getStackId(), configType);
+ LOG.error(error);
+ return createCommandReport(0, HostRoleStatus.FAILED, "{}", "", error);
+ }
+
+ Map<String, String> defaultConfigsForType = newServiceDefaultConfigsByType.get(configType);
+ // Remove any property for the new config type whose value is NULL
+ Iterator<Map.Entry<String, String>> iter = defaultConfigsForType.entrySet().iterator();
+ while (iter.hasNext()) {
+ Map.Entry<String, String> entry = iter.next();
+ if (entry.getValue() == null) {
+ iter.remove();
+ }
+ }
+
+ String serviceVersionNote = String.format("%s %s %s", direction.getText(true),
+ direction.getPreposition(), upgradeContext.getRepositoryVersion().getVersion());
+
+ m_configHelper.createConfigType(cluster, targetStackId,
+ m_controller,
+ configType, defaultConfigsForType,
+ m_controller.getAuthName(), serviceVersionNote);
+ }
+
+ return super.execute(requestSharedDataContext);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/29512573/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
index 56f13ab..43c7a55 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/UpgradePack.java
@@ -38,6 +38,7 @@ import javax.xml.bind.annotation.XmlValue;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.stack.upgrade.ClusterGrouping;
import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
+import org.apache.ambari.server.state.stack.upgrade.CreateAndConfigureTask;
import org.apache.ambari.server.state.stack.upgrade.Direction;
import org.apache.ambari.server.state.stack.upgrade.Grouping;
import org.apache.ambari.server.state.stack.upgrade.ServiceCheckGrouping;
@@ -594,6 +595,8 @@ public class UpgradePack {
for (Task task : tasks) {
if (Task.Type.CONFIGURE == task.getType()) {
((ConfigureTask) task).associatedService = service;
+ } else if (Task.Type.CREATE_AND_CONFIGURE == task.getType()) {
+ ((CreateAndConfigureTask) task).associatedService = service;
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29512573/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
index 63d0993..c1a05c0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/ClusterGrouping.java
@@ -135,6 +135,8 @@ public class ClusterGrouping extends Grouping {
void afterUnmarshal(Unmarshaller unmarshaller, Object parent) {
if (task.getType().equals(Task.Type.CONFIGURE) && StringUtils.isNotEmpty(service)) {
((ConfigureTask) task).associatedService = service;
+ } else if (task.getType().equals(Task.Type.CREATE_AND_CONFIGURE) && StringUtils.isNotEmpty(service)) {
+ ((CreateAndConfigureTask) task).associatedService = service;
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29512573/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/CreateAndConfigureTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/CreateAndConfigureTask.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/CreateAndConfigureTask.java
new file mode 100644
index 0000000..d89840f
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/CreateAndConfigureTask.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state.stack.upgrade;
+
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlType;
+
+import org.apache.ambari.server.serveraction.upgrades.CreateAndConfigureAction;
+
+/**
+ * The {@link CreateAndConfigureTask} represents a two-step change: first the config type is created if it does not exist,
+ * then the configuration change is applied.
+ * This task contains id of change. Change definitions are located in a separate file (config
+ * upgrade pack). IDs of change definitions share the same namespace within all stacks.
+ *
+ *
+ * <p/>
+ *
+ * <pre>
+ * {@code
+ * <task xsi:type="create_and_configure" id="hdp_2_3_0_0-UpdateHiveConfig"/>
+ * }
+ * </pre>
+ *
+ */
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+@XmlType(name="create_and_configure")
+public class CreateAndConfigureTask extends ConfigureTask {
+
+ public static final String actionVerb = "CreateAndConfiguring";
+
+ /**
+ * Constructor.
+ */
+ public CreateAndConfigureTask() {
+ implClass = CreateAndConfigureAction.class.getName();
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/29512573/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
index 6ab2fd2..2167b7b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/Task.java
@@ -25,7 +25,7 @@ import javax.xml.bind.annotation.XmlSeeAlso;
/**
* Base class to identify the items that could possibly occur during an upgrade
*/
-@XmlSeeAlso(value={ExecuteTask.class, ConfigureTask.class, ManualTask.class, RestartTask.class, StartTask.class, StopTask.class, ServerActionTask.class, ConfigureFunction.class})
+@XmlSeeAlso(value={ExecuteTask.class, CreateAndConfigureTask.class, ConfigureTask.class, ManualTask.class, RestartTask.class, StartTask.class, StopTask.class, ServerActionTask.class, ConfigureFunction.class})
public abstract class Task {
/**
@@ -96,6 +96,10 @@ public abstract class Task {
*/
CONFIGURE,
/**
+ * Task that creates a config type if it does not exist, and alters a configuration.
+ */
+ CREATE_AND_CONFIGURE,
+ /**
* Task that sets up the configuration for subsequent task
*/
CONFIGURE_FUNCTION,
http://git-wip-us.apache.org/repos/asf/ambari/blob/29512573/ambari-server/src/main/resources/upgrade-pack.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/upgrade-pack.xsd b/ambari-server/src/main/resources/upgrade-pack.xsd
index 21606bd..249725e 100644
--- a/ambari-server/src/main/resources/upgrade-pack.xsd
+++ b/ambari-server/src/main/resources/upgrade-pack.xsd
@@ -87,7 +87,7 @@
</xs:extension>
</xs:complexContent>
</xs:complexType>
-
+
<xs:complexType name="security">
<xs:complexContent>
<xs:extension base="abstract-condition-type">
@@ -336,6 +336,13 @@
</xs:complexContent>
</xs:complexType>
+ <xs:complexType name="create_and_configure">
+ <xs:complexContent>
+ <xs:extension base="configure">
+ </xs:extension>
+ </xs:complexContent>
+ </xs:complexType>
+
<xs:complexType name="configure_function">
<xs:complexContent>
<xs:extension base="abstract-task-type">
http://git-wip-us.apache.org/repos/asf/ambari/blob/29512573/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureActionTest.java
new file mode 100644
index 0000000..43b5bd0
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureActionTest.java
@@ -0,0 +1,357 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.persistence.EntityManager;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.H2DatabaseCleaner;
+import org.apache.ambari.server.ServiceComponentNotFoundException;
+import org.apache.ambari.server.ServiceNotFoundException;
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.OrmTestHelper;
+import org.apache.ambari.server.orm.dao.RequestDAO;
+import org.apache.ambari.server.orm.dao.UpgradeDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
+import org.apache.ambari.server.orm.entities.UpgradeHistoryEntity;
+import org.apache.ambari.server.serveraction.ServerAction;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.ServiceComponentFactory;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceComponentHostFactory;
+import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition.ConfigurationKeyValue;
+import org.apache.ambari.server.state.stack.upgrade.CreateAndConfigureTask;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
+import org.apache.commons.lang.StringUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.gson.Gson;
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+
+/**
+ * Tests upgrade-related server side actions
+ */
+public class CreateAndConfigureActionTest {
+
+ @Inject
+ private Injector m_injector;
+
+ @Inject
+ private OrmTestHelper m_helper;
+
+ @Inject
+ private HostRoleCommandFactory hostRoleCommandFactory;
+
+ @Inject
+ private ServiceFactory serviceFactory;
+
+ @Inject
+ private ConfigHelper m_configHelper;
+
+ @Inject
+ private Clusters clusters;
+
+ @Inject
+ private ConfigFactory configFactory;
+
+ @Inject
+ private CreateAndConfigureAction action;
+
+ @Inject
+ private RequestDAO requestDAO;
+
+ @Inject
+ private UpgradeDAO upgradeDAO;
+
+ @Inject
+ private ServiceComponentFactory serviceComponentFactory;
+
+ @Inject
+ private ServiceComponentHostFactory serviceComponentHostFactory;
+
+ private RepositoryVersionEntity repoVersion2110;
+ private RepositoryVersionEntity repoVersion2111;
+ private RepositoryVersionEntity repoVersion2200;
+
+ private final Map<String, Map<String, String>> NO_ATTRIBUTES = new HashMap<>();
+
+ @Before
+ public void setup() throws Exception {
+ m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
+ m_injector.getInstance(GuiceJpaInitializer.class);
+ m_injector.injectMembers(this);
+
+ repoVersion2110 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.1.1"), "2.1.1.0-1234");
+ repoVersion2111 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.1.1"), "2.1.1.1-5678");
+ repoVersion2200 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.2.0"), "2.2.0.0-1234");
+
+ makeUpgradeCluster();
+ }
+
+ @After
+ public void teardown() throws Exception {
+ H2DatabaseCleaner.clearDatabase(m_injector.getProvider(EntityManager.class).get());
+ }
+
+
+ /**
+ * Tests that a new configuration is created when upgrading across stack when
+ * there is no existing configuration with the correct target stack.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testNewConfigCreatedWhenUpgradingWithoutChaningStack() throws Exception {
+ Cluster c = clusters.getCluster("c1");
+ assertEquals(1, c.getConfigsByType("zoo.cfg").size());
+
+ Map<String, String> properties = new HashMap<String, String>() {
+ {
+ put("initLimit", "10");
+ }
+ };
+
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
+
+ c.addDesiredConfig("user", Collections.singleton(config));
+ assertEquals(2, c.getConfigsByType("zoo.cfg").size());
+
+ List<ConfigurationKeyValue> configurations = new ArrayList<>();
+ ConfigurationKeyValue keyValue = new ConfigurationKeyValue();
+ configurations.add(keyValue);
+ keyValue.key = "initLimit";
+ keyValue.value = "11";
+ c.setCurrentStackVersion(repoVersion2110.getStackId());
+ c.setDesiredStackVersion(repoVersion2111.getStackId());
+
+ createUpgrade(c, repoVersion2111);
+
+ Map<String, String> commandParams = new HashMap<>();
+ commandParams.put("clusterName", "c1");
+ commandParams.put(CreateAndConfigureTask.PARAMETER_CONFIG_TYPE, "zoo.cfg");
+ commandParams.put(CreateAndConfigureTask.PARAMETER_KEY_VALUE_PAIRS, new Gson().toJson(configurations));
+
+ ExecutionCommand executionCommand = getExecutionCommand(commandParams);
+ HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null,
+ null, null);
+
+ hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
+ executionCommand));
+
+ action.setExecutionCommand(executionCommand);
+ action.setHostRoleCommand(hostRoleCommand);
+
+ CommandReport report = action.execute(null);
+ assertNotNull(report);
+
+ assertEquals(3, c.getConfigsByType("zoo.cfg").size());
+
+ config = c.getDesiredConfigByType("zoo.cfg");
+ assertNotNull(config);
+ assertFalse(StringUtils.equals("version2", config.getTag()));
+ assertEquals("11", config.getProperties().get("initLimit"));
+ }
+
+ /**
+ * Creates a cluster using {@link #repoVersion2110} with ZooKeeper installed.
+ *
+ * @throws Exception
+ */
+ private void makeUpgradeCluster() throws Exception {
+ String clusterName = "c1";
+ String hostName = "h1";
+
+ clusters.addCluster(clusterName, repoVersion2110.getStackId());
+
+ Cluster c = clusters.getCluster(clusterName);
+
+ // add a host component
+ clusters.addHost(hostName);
+ Host host = clusters.getHost(hostName);
+ Map<String, String> hostAttributes = new HashMap<>();
+ hostAttributes.put("os_family", "redhat");
+ hostAttributes.put("os_release_version", "6");
+ host.setHostAttributes(hostAttributes);
+
+ clusters.mapHostToCluster(hostName, clusterName);
+
+ // !!! very important, otherwise the loops that walk the list of installed
+ // service properties will not run!
+ Service zk = installService(c, "ZOOKEEPER", repoVersion2110);
+ addServiceComponent(c, zk, "ZOOKEEPER_SERVER");
+ addServiceComponent(c, zk, "ZOOKEEPER_CLIENT");
+ createNewServiceComponentHost(c, "ZOOKEEPER", "ZOOKEEPER_SERVER", hostName);
+ createNewServiceComponentHost(c, "ZOOKEEPER", "ZOOKEEPER_CLIENT", hostName);
+
+ Map<String, String> properties = new HashMap<String, String>() {
+ {
+ put("initLimit", "10");
+ }
+ };
+
+ Config config = createConfig(c, "zoo.cfg", "version1", properties);
+
+ c.addDesiredConfig("user", Collections.singleton(config));
+
+ // verify that our configs are there
+ String tickTime = m_configHelper.getPropertyValueFromStackDefinitions(c, "zoo.cfg", "tickTime");
+ assertNotNull(tickTime);
+ }
+
+ /**
+ * Installs a service in the cluster.
+ *
+ * @param cluster
+ * @param serviceName
+ * @return
+ * @throws AmbariException
+ */
+ private Service installService(Cluster cluster, String serviceName,
+ RepositoryVersionEntity repositoryVersion) throws AmbariException {
+ Service service = null;
+
+ try {
+ service = cluster.getService(serviceName);
+ } catch (ServiceNotFoundException e) {
+ service = serviceFactory.createNew(cluster, serviceName, repositoryVersion);
+ cluster.addService(service);
+ }
+
+ return service;
+ }
+
+ private ServiceComponent addServiceComponent(Cluster cluster, Service service,
+ String componentName) throws AmbariException {
+ ServiceComponent serviceComponent = null;
+ try {
+ serviceComponent = service.getServiceComponent(componentName);
+ } catch (ServiceComponentNotFoundException e) {
+ serviceComponent = serviceComponentFactory.createNew(service, componentName);
+ service.addServiceComponent(serviceComponent);
+ serviceComponent.setDesiredState(State.INSTALLED);
+ }
+
+ return serviceComponent;
+ }
+
+ private ServiceComponentHost createNewServiceComponentHost(Cluster cluster, String serviceName,
+ String svcComponent, String hostName) throws AmbariException {
+ Assert.assertNotNull(cluster.getConfigGroups());
+ Service s = cluster.getService(serviceName);
+ ServiceComponent sc = addServiceComponent(cluster, s, svcComponent);
+
+ ServiceComponentHost sch = serviceComponentHostFactory.createNew(sc, hostName);
+
+ sc.addServiceComponentHost(sch);
+ sch.setDesiredState(State.INSTALLED);
+ sch.setState(State.INSTALLED);
+ return sch;
+ }
+
+ /**
+ * Creates an upgrade and associates it with the cluster.
+ */
+ private UpgradeEntity createUpgrade(Cluster cluster, RepositoryVersionEntity repositoryVersion)
+ throws Exception {
+
+ // create some entities for the finalize action to work with for patch
+ // history
+ RequestEntity requestEntity = new RequestEntity();
+ requestEntity.setClusterId(cluster.getClusterId());
+ requestEntity.setRequestId(1L);
+ requestEntity.setStartTime(System.currentTimeMillis());
+ requestEntity.setCreateTime(System.currentTimeMillis());
+ requestDAO.create(requestEntity);
+
+ UpgradeEntity upgradeEntity = new UpgradeEntity();
+ upgradeEntity.setId(1L);
+ upgradeEntity.setClusterId(cluster.getClusterId());
+ upgradeEntity.setRequestEntity(requestEntity);
+ upgradeEntity.setUpgradePackage("");
+ upgradeEntity.setRepositoryVersion(repositoryVersion);
+ upgradeEntity.setUpgradeType(UpgradeType.NON_ROLLING);
+
+ Map<String, Service> services = cluster.getServices();
+ for (String serviceName : services.keySet()) {
+ Service service = services.get(serviceName);
+ Map<String, ServiceComponent> components = service.getServiceComponents();
+ for (String componentName : components.keySet()) {
+ UpgradeHistoryEntity history = new UpgradeHistoryEntity();
+ history.setUpgrade(upgradeEntity);
+ history.setServiceName(serviceName);
+ history.setComponentName(componentName);
+ history.setFromRepositoryVersion(service.getDesiredRepositoryVersion());
+ history.setTargetRepositoryVersion(repositoryVersion);
+ upgradeEntity.addHistory(history);
+ }
+ }
+
+ upgradeDAO.create(upgradeEntity);
+ cluster.setUpgradeEntity(upgradeEntity);
+ return upgradeEntity;
+ }
+
+ private ExecutionCommand getExecutionCommand(Map<String, String> commandParams) {
+ ExecutionCommand executionCommand = new ExecutionCommand();
+ executionCommand.setClusterName("c1");
+ executionCommand.setCommandParams(commandParams);
+ executionCommand.setRoleParams(new HashMap<String, String>());
+ executionCommand.getRoleParams().put(ServerAction.ACTION_USER_NAME, "username");
+
+ return executionCommand;
+ }
+
+ private Config createConfig(Cluster cluster, String type, String tag,
+ Map<String, String> properties) {
+ return configFactory.createNew(cluster, type, tag, properties,
+ NO_ATTRIBUTES);
+ }
+}
\ No newline at end of file
[10/11] ambari git commit: AMBARI-22464. disable
hive.auto.convert.sortmerge.join in Hive configs in Ambari.(vbrodetskyi)
Posted by jo...@apache.org.
AMBARI-22464. disable hive.auto.convert.sortmerge.join in Hive configs in Ambari.(vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c09c69b2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c09c69b2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c09c69b2
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: c09c69b2fd502801c1355fc4dc275f3922228c81
Parents: 946356a
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Fri Nov 17 15:14:01 2017 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Fri Nov 17 15:14:01 2017 +0200
----------------------------------------------------------------------
.../common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml | 2 +-
.../stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml | 2 +-
.../stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c09c69b2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
index 0b11187..89fe471 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
@@ -283,7 +283,7 @@ limitations under the License.
</property>
<property>
<name>hive.auto.convert.sortmerge.join</name>
- <value>true</value>
+ <value>false</value>
<description>Will the join be automatically converted to a sort-merge join, if the joined tables pass
the criteria for sort-merge join.
</description>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c09c69b2/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
index 1c8f475..ea6ad25 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/services/HIVE/configuration/hive-site.xml
@@ -258,7 +258,7 @@ limitations under the License.
</property>
<property>
<name>hive.auto.convert.sortmerge.join</name>
- <value>true</value>
+ <value>false</value>
<description>Will the join be automatically converted to a sort-merge join, if the joined tables pass
the criteria for sort-merge join.
</description>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c09c69b2/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
index d8cf961..15cd9f2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
@@ -692,7 +692,7 @@ limitations under the License.
</property>
<property>
<name>hive.auto.convert.sortmerge.join</name>
- <value>true</value>
+ <value>false</value>
<description>Will the join be automatically converted to a sort-merge join, if the joined tables pass the criteria for sort-merge join.</description>
<on-ambari-upgrade add="true"/>
</property>