You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/10 17:12:58 UTC
[01/24] ambari git commit: AMBARI-14956. If cluster is not deployed
login message is shown 2-3 seconds (alexantonenko)
Repository: ambari
Updated Branches:
refs/heads/branch-dev-patch-upgrade 697c309c1 -> c543ef8b2
AMBARI-14956. If cluster is not deployed login message is shown 2-3 seconds
(alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/62f3a3bb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/62f3a3bb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/62f3a3bb
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 62f3a3bb7f72c5564b45b8d1fd64a567471084c1
Parents: f60472d
Author: Alex Antonenko <hi...@gmail.com>
Authored: Mon Feb 8 15:56:28 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Mon Feb 8 17:27:01 2016 +0200
----------------------------------------------------------------------
ambari-web/app/router.js | 48 +++++++++++++++++++++++++++----------------
1 file changed, 30 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/62f3a3bb/ambari-web/app/router.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/router.js b/ambari-web/app/router.js
index 360480f..809979e 100644
--- a/ambari-web/app/router.js
+++ b/ambari-web/app/router.js
@@ -320,22 +320,10 @@ App.Router = Em.Router.extend({
App.ajax.send({
name: 'router.login.message',
sender: self,
- success: 'showLoginMessage'
-
- });
-
- // no need to load cluster data if it's already loaded
- if (self.get('clusterData')) {
- self.loginGetClustersSuccessCallback(self.get('clusterData'), {}, requestData);
- }
- else {
- App.ajax.send({
- name: 'router.login.clusters',
- sender: self,
- data: requestData,
- success: 'loginGetClustersSuccessCallback'
- });
- }
+ data: requestData,
+ success: 'showLoginMessage',
+ error: 'showLoginMessage'
+ });
});
},
@@ -362,11 +350,12 @@ App.Router = Em.Router.extend({
* success callback of router.login.message
* @param {object} data
*/
- showLoginMessage: function (data){
+ showLoginMessage: function (data, opt, params){
var response = JSON.parse(data.Settings.content.replace(/\n/g, "\\n")),
text = response.text ? response.text : "",
buttonText = response.button ? response.button : Em.I18n.t('ok'),
- status = response.status && response.status == "true" ? true : false;
+ status = response.status && response.status == "true" ? true : false,
+ self = this;
if(text && status){
return App.ModalPopup.show({
@@ -379,9 +368,11 @@ App.Router = Em.Router.extend({
secondary: null,
onPrimary: function () {
+ self.setClusterData(data, opt, params);
this.hide();
},
onClose: function () {
+ self.setClusterData(data, opt, params);
this.hide();
},
didInsertElement: function () {
@@ -391,6 +382,27 @@ App.Router = Em.Router.extend({
}
},
+ setClusterData: function (data, opt, params) {
+ var
+ self = this,
+ requestData = {
+ loginName: params.loginName,
+ loginData: data
+ };
+ // no need to load cluster data if it's already loaded
+ if (this.get('clusterData')) {
+ this.loginGetClustersSuccessCallback(self.get('clusterData'), {}, requestData);
+ }
+ else {
+ App.ajax.send({
+ name: 'router.login.clusters',
+ sender: self,
+ data: requestData,
+ success: 'loginGetClustersSuccessCallback'
+ });
+ }
+ },
+
/**
* success callback of login request
[07/24] ambari git commit: AMBARI-14949: Ambaripreupload should skip
uploading oozie shared lib for upgrade (jluniya)
Posted by nc...@apache.org.
AMBARI-14949: Ambaripreupload should skip uploading oozie shared lib for upgrade (jluniya)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/53526383
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/53526383
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/53526383
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 53526383411f4709a6a59befeee8e68c7420cdc5
Parents: e76099a
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Mon Feb 8 14:36:00 2016 -0800
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Mon Feb 8 14:36:00 2016 -0800
----------------------------------------------------------------------
.../main/resources/scripts/Ambaripreupload.py | 31 ++++++++++++--------
1 file changed, 18 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/53526383/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 591b7d2..5a20698 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -85,8 +85,10 @@ with Environment() as env:
parser = OptionParser()
parser.add_option("-v", "--hdp-version", dest="hdp_version", default="",
help="hdp-version used in path of tarballs")
-
+ parser.add_option("-u", "--upgrade", dest="upgrade", action="store_true",
+ help="flag to indicate script is being run for upgrade", default=False)
(options, args) = parser.parse_args()
+
# See if hdfs path prefix is provided on the command line. If yes, use that value, if no
# use empty string as default.
@@ -273,19 +275,22 @@ with Environment() as env:
oozie_hdfs_user_dir = format("{hdfs_path_prefix}/user/{oozie_user}")
kinit_if_needed = ''
- params.HdfsResource(format("{oozie_hdfs_user_dir}/share/"),
- action="delete_on_execute",
- type = 'directory'
- )
+ if options.upgrade:
+ Logger.info("Skipping uploading oozie shared lib during upgrade")
+ else:
+ params.HdfsResource(format("{oozie_hdfs_user_dir}/share/"),
+ action="delete_on_execute",
+ type = 'directory'
+ )
- params.HdfsResource(format("{oozie_hdfs_user_dir}/share"),
- action="create_on_execute",
- type = 'directory',
- mode=0755,
- recursive_chmod = True,
- owner=oozie_user,
- source = oozie_shared_lib,
- )
+ params.HdfsResource(format("{oozie_hdfs_user_dir}/share"),
+ action="create_on_execute",
+ type = 'directory',
+ mode=0755,
+ recursive_chmod = True,
+ owner=oozie_user,
+ source = oozie_shared_lib,
+ )
print "Copying tarballs..."
copy_tarballs_to_hdfs(format("/usr/hdp/{hdp_version}/hadoop/mapreduce.tar.gz"), hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/mapreduce/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
[06/24] ambari git commit: AMBARI-14950. EU: Oozie service check
failed after downgrade with multiple Oozie servers since rerunning
prepare-war removes webapps/oozie folder (alejandro)
Posted by nc...@apache.org.
AMBARI-14950. EU: Oozie service check failed after downgrade with multiple Oozie servers since rerunning prepare-war removes webapps/oozie folder (alejandro)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e76099a2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e76099a2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e76099a2
Branch: refs/heads/branch-dev-patch-upgrade
Commit: e76099a275793607a86e7c34eb91c89db74021d6
Parents: f905a02
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Mon Feb 8 11:43:12 2016 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Mon Feb 8 11:43:14 2016 -0800
----------------------------------------------------------------------
.../OOZIE/4.0.0.2.0/package/scripts/oozie.py | 71 ++++++--
.../4.0.0.2.0/package/scripts/oozie_server.py | 3 +-
.../package/scripts/oozie_server_upgrade.py | 18 +-
.../stacks/2.0.6/OOZIE/test_oozie_server.py | 172 +++++++++----------
4 files changed, 143 insertions(+), 121 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e76099a2/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index 24a35ad..7591bad 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -32,10 +32,15 @@ from resource_management.libraries.script.script import Script
from resource_management.core.resources.packaging import Package
from resource_management.core.shell import as_user
from resource_management.core.shell import as_sudo
+from resource_management.core import shell
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+
from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
from ambari_commons import OSConst
from ambari_commons.inet_utils import download_file
+
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def oozie(is_server=False):
import params
@@ -191,7 +196,59 @@ def oozie_ownership():
owner = params.oozie_user,
group = params.user_group
)
-
+
+
+def prepare_war():
+ """
+ Attempt to call prepare-war command if the marker file doesn't exist or its content doesn't equal the expected command.
+ The marker file is stored in /usr/hdp/current/oozie-server/.prepare_war_cmd
+ """
+ import params
+
+ prepare_war_cmd_file = format("{oozie_home}/.prepare_war_cmd")
+
+ # DON'T CHANGE THE VALUE SINCE IT'S USED TO DETERMINE WHETHER TO RUN THE COMMAND OR NOT BY READING THE MARKER FILE.
+ # Oozie tmp dir should be /var/tmp/oozie and is already created by a function above.
+ command = format("cd {oozie_tmp_dir} && {oozie_setup_sh} prepare-war {oozie_secure}")
+ command = command.strip()
+
+ run_prepare_war = False
+ if os.path.exists(prepare_war_cmd_file):
+ cmd = ""
+ with open(prepare_war_cmd_file, "r") as f:
+ cmd = f.readline().strip()
+
+ if command != cmd:
+ run_prepare_war = True
+ Logger.info(format("Will run prepare war cmd since marker file {prepare_war_cmd_file} has contents which differ.\n" \
+ "Expected: {command}.\nActual: {cmd}."))
+ else:
+ run_prepare_war = True
+ Logger.info(format("Will run prepare war cmd since marker file {prepare_war_cmd_file} is missing."))
+
+ if run_prepare_war:
+ # Time-consuming to run
+ Execute(command,
+ user=params.oozie_user
+ )
+
+ return_code, output = shell.call(command, user=params.oozie_user, logoutput=False, quiet=False)
+ if output is None:
+ output = ""
+
+ if return_code != 0 or "New Oozie WAR file with added".lower() not in output.lower():
+ message = "Unexpected Oozie WAR preparation output {0}".format(output)
+ Logger.error(message)
+ raise Fail(message)
+
+ # Generate marker file
+ File(prepare_war_cmd_file,
+ content=command,
+ mode=0644,
+ )
+ else:
+ Logger.info(format("No need to run prepare-war since marker file {prepare_war_cmd_file} already exists."))
+
def oozie_server_specific():
import params
@@ -259,22 +316,12 @@ def oozie_server_specific():
not_if = no_op_test,
)
- prepare_war_cmd_file = format("{oozie_home}/.prepare_war_cmd")
- prepare_war_cmd = format("cd {oozie_tmp_dir} && {oozie_setup_sh} prepare-war {oozie_secure}")
- skip_prepare_war_cmd = format("test -f {prepare_war_cmd_file} && [[ `cat {prepare_war_cmd_file}` == '{prepare_war_cmd}' ]]")
+ prepare_war()
- Execute(prepare_war_cmd, # time-expensive
- user = params.oozie_user,
- not_if = format("{no_op_test} || {skip_recreate_sharelib} && {skip_prepare_war_cmd}")
- )
File(hashcode_file,
content = hashcode,
mode = 0644,
)
- File(prepare_war_cmd_file,
- content = prepare_war_cmd,
- mode = 0644,
- )
if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
# Create hive-site and tez-site configs for oozie
http://git-wip-us.apache.org/repos/asf/ambari/blob/e76099a2/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
index 35975df..dc00b13 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
@@ -39,7 +39,8 @@ from oozie_service import oozie_service
from oozie_server_upgrade import OozieUpgrade
from check_oozie_server_status import check_oozie_server_status
-
+
+
class OozieServer(Script):
def get_stack_to_component(self):
http://git-wip-us.apache.org/repos/asf/ambari/blob/e76099a2/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
index 326e76c..4d68f03 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
@@ -199,23 +199,7 @@ class OozieUpgrade(Script):
command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal_with_host}")
Execute(command, user=params.oozie_user, logoutput=True)
- # setup environment
- environment = { "CATALINA_BASE" : "/usr/hdp/current/oozie-server/oozie-server",
- "OOZIE_HOME" : "/usr/hdp/current/oozie-server" }
-
- # prepare the oozie WAR
- command = format("{oozie_setup_sh} prepare-war {oozie_secure} -d {oozie_libext_dir}")
- return_code, oozie_output = shell.call(command, user=params.oozie_user,
- logoutput=False, quiet=False, env=environment)
-
- # set it to "" in to prevent a possible iteration issue
- if oozie_output is None:
- oozie_output = ""
-
- if return_code != 0 or "New Oozie WAR file with added".lower() not in oozie_output.lower():
- message = "Unexpected Oozie WAR preparation output {0}".format(oozie_output)
- Logger.error(message)
- raise Fail(message)
+ oozie.prepare_war()
def upgrade_oozie_database_and_sharelib(self, env):
http://git-wip-us.apache.org/repos/asf/ambari/blob/e76099a2/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 2183425..b9c0717 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -43,25 +43,30 @@ class TestOozieServer(RMFTestCase):
def setUp(self):
self.maxDiff = None
- def test_configure_default(self):
+ @patch.object(shell, "call")
+ def test_configure_default(self, call_mocks):
+ call_mocks = MagicMock(return_value=(0, "New Oozie WAR file with added"))
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
classname = "OozieServer",
command = "configure",
config_file="default.json",
hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
+ target = RMFTestCase.TARGET_COMMON_SERVICES,
+ call_mocks = call_mocks
)
self.assert_configure_default()
self.assertNoMoreResources()
-
- def test_configure_default_mysql(self):
+ @patch.object(shell, "call")
+ def test_configure_default_mysql(self, call_mocks):
+ call_mocks = MagicMock(return_value=(0, "New Oozie WAR file with added"))
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
classname = "OozieServer",
command = "configure",
config_file="default_oozie_mysql.json",
hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
+ target = RMFTestCase.TARGET_COMMON_SERVICES,
+ call_mocks = call_mocks
)
self.assertResourceCalled('HdfsResource', '/user/oozie',
security_enabled = False,
@@ -253,17 +258,17 @@ class TestOozieServer(RMFTestCase):
self.assertResourceCalled('Execute', 'ambari-sudo.sh chown oozie:hadoop /usr/lib/oozie/libext/falcon-oozie-el-extension-*.jar',
not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
)
- self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
- not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1' || test -f /usr/lib/oozie/.hashcode && test -d /usr/lib/oozie/share && [[ `cat /usr/lib/oozie/.hashcode` == 'abc123hash' ]] && test -f /usr/lib/oozie/.prepare_war_cmd && [[ `cat /usr/lib/oozie/.prepare_war_cmd` == 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ' ]]",
- user = 'oozie',
- )
- self.assertResourceCalled('File', '/usr/lib/oozie/.hashcode',
- content = 'abc123hash',
- mode = 0644,
+
+ self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
+ user = "oozie"
)
self.assertResourceCalled('File', '/usr/lib/oozie/.prepare_war_cmd',
- content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
- mode = 0644,
+ content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
+ mode = 0644,
+ )
+ self.assertResourceCalled('File', '/usr/lib/oozie/.hashcode',
+ content = 'abc123hash',
+ mode = 0644,
)
self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
owner = 'oozie',
@@ -271,14 +276,16 @@ class TestOozieServer(RMFTestCase):
recursive_ownership = True,
)
-
- def test_configure_existing_sqla(self):
+ @patch.object(shell, "call")
+ def test_configure_existing_sqla(self, call_mocks):
+ call_mocks = MagicMock(return_value=(0, "New Oozie WAR file with added"))
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
classname = "OozieServer",
command = "configure",
config_file="oozie_existing_sqla.json",
hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
+ target = RMFTestCase.TARGET_COMMON_SERVICES,
+ call_mocks = call_mocks
)
self.assertResourceCalled('HdfsResource', '/user/oozie',
security_enabled = False,
@@ -475,18 +482,17 @@ class TestOozieServer(RMFTestCase):
self.assertResourceCalled('Execute', 'ambari-sudo.sh chown oozie:hadoop /usr/lib/oozie/libext/falcon-oozie-el-extension-*.jar',
not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
)
- self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
- not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1' || test -f /usr/lib/oozie/.hashcode && test -d /usr/lib/oozie/share && [[ `cat /usr/lib/oozie/.hashcode` == 'abc123hash' ]] && test -f /usr/lib/oozie/.prepare_war_cmd && [[ `cat /usr/lib/oozie/.prepare_war_cmd` == 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ' ]]",
- user = 'oozie',
- )
+ self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
+ user = "oozie"
+ )
+ self.assertResourceCalled('File', '/usr/lib/oozie/.prepare_war_cmd',
+ content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
+ mode = 0644,
+ )
self.assertResourceCalled('File', '/usr/lib/oozie/.hashcode',
content = 'abc123hash',
mode = 0644,
- )
- self.assertResourceCalled('File', '/usr/lib/oozie/.prepare_war_cmd',
- content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
- mode = 0644,
- )
+ )
self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
owner = 'oozie',
group = 'hadoop',
@@ -494,16 +500,18 @@ class TestOozieServer(RMFTestCase):
)
self.assertNoMoreResources()
-
+ @patch.object(shell, "call")
@patch("os.path.isfile")
- def test_start_default(self, isfile_mock):
+ def test_start_default(self, isfile_mock, call_mocks):
isfile_mock.return_value = True
+ call_mocks = MagicMock(return_value=(0, "New Oozie WAR file with added"))
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
classname = "OozieServer",
command = "start",
config_file="default.json",
hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
+ target = RMFTestCase.TARGET_COMMON_SERVICES,
+ call_mocks = call_mocks
)
self.assert_configure_default()
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run',
@@ -551,7 +559,6 @@ class TestOozieServer(RMFTestCase):
)
self.assertNoMoreResources()
-
def test_stop_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
classname = "OozieServer",
@@ -570,27 +577,32 @@ class TestOozieServer(RMFTestCase):
)
self.assertNoMoreResources()
-
- def test_configure_secured(self):
+ @patch.object(shell, "call")
+ def test_configure_secured(self, call_mocks):
+ call_mocks = MagicMock(return_value=(0, "New Oozie WAR file with added"))
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
classname = "OozieServer",
command = "configure",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
+ target = RMFTestCase.TARGET_COMMON_SERVICES,
+ call_mocks = call_mocks
)
self.assert_configure_secured()
self.assertNoMoreResources()
+ @patch.object(shell, "call")
@patch("os.path.isfile")
- def test_start_secured(self, isfile_mock):
+ def test_start_secured(self, isfile_mock, call_mocks):
isfile_mock.return_value = True
+ call_mocks = MagicMock(return_value=(0, "New Oozie WAR file with added"))
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
classname = "OozieServer",
command = "start",
config_file="secured.json",
hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
+ target = RMFTestCase.TARGET_COMMON_SERVICES,
+ call_mocks = call_mocks
)
self.assert_configure_secured()
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/ooziedb.sh create -sqlfile oozie.sql -run',
@@ -660,7 +672,6 @@ class TestOozieServer(RMFTestCase):
)
self.assertNoMoreResources()
-
def assert_configure_default(self):
self.assertResourceCalled('HdfsResource', '/user/oozie',
security_enabled = False,
@@ -834,17 +845,17 @@ class TestOozieServer(RMFTestCase):
self.assertResourceCalled('Execute', 'ambari-sudo.sh chown oozie:hadoop /usr/lib/oozie/libext/falcon-oozie-el-extension-*.jar',
not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
)
- self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
- not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1' || test -f /usr/lib/oozie/.hashcode && test -d /usr/lib/oozie/share && [[ `cat /usr/lib/oozie/.hashcode` == 'abc123hash' ]] && test -f /usr/lib/oozie/.prepare_war_cmd && [[ `cat /usr/lib/oozie/.prepare_war_cmd` == 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ' ]]",
- user = 'oozie',
+
+ self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
+ user="oozie")
+
+ self.assertResourceCalled('File', '/usr/lib/oozie/.prepare_war_cmd',
+ content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war',
+ mode = 0644,
)
self.assertResourceCalled('File', '/usr/lib/oozie/.hashcode',
- content = 'abc123hash',
- mode = 0644,
- )
- self.assertResourceCalled('File', '/usr/lib/oozie/.prepare_war_cmd',
- content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war ',
- mode = 0644,
+ content = 'abc123hash',
+ mode = 0644,
)
self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
owner = 'oozie',
@@ -852,7 +863,6 @@ class TestOozieServer(RMFTestCase):
recursive_ownership = True,
)
-
def assert_configure_secured(self):
self.assertResourceCalled('HdfsResource', '/user/oozie',
security_enabled = True,
@@ -1028,17 +1038,17 @@ class TestOozieServer(RMFTestCase):
self.assertResourceCalled('Execute', 'ambari-sudo.sh chown oozie:hadoop /usr/lib/oozie/libext/falcon-oozie-el-extension-*.jar',
not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1'",
)
+
self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war -secure',
- not_if = "ambari-sudo.sh su oozie -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run/oozie/oozie.pid` >/dev/null 2>&1' || test -f /usr/lib/oozie/.hashcode && test -d /usr/lib/oozie/share && [[ `cat /usr/lib/oozie/.hashcode` == 'abc123hash' ]] && test -f /usr/lib/oozie/.prepare_war_cmd && [[ `cat /usr/lib/oozie/.prepare_war_cmd` == 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war -secure' ]]",
- user = 'oozie',
+ user="oozie")
+
+ self.assertResourceCalled('File', '/usr/lib/oozie/.prepare_war_cmd',
+ content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war -secure',
+ mode = 0644,
)
self.assertResourceCalled('File', '/usr/lib/oozie/.hashcode',
- content = 'abc123hash',
- mode = 0644,
- )
- self.assertResourceCalled('File', '/usr/lib/oozie/.prepare_war_cmd',
- content = 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-setup.sh prepare-war -secure',
- mode = 0644,
+ content = 'abc123hash',
+ mode = 0644,
)
self.assertResourceCalled('Directory', '/var/lib/oozie/oozie-server',
owner = 'oozie',
@@ -1046,43 +1056,23 @@ class TestOozieServer(RMFTestCase):
recursive_ownership = True,
)
- def test_configure_default_hdp22(self):
- config_file = "stacks/2.0.6/configs/default.json"
- with open(config_file, "r") as f:
- default_json = json.load(f)
-
- default_json['hostLevelParams']['stack_version']= '2.2'
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
- classname = "OozieServer",
- command = "configure",
- config_file="default.json",
- hdp_stack_version = self.STACK_VERSION,
- target = RMFTestCase.TARGET_COMMON_SERVICES
- )
- self.assert_configure_default()
- self.assertResourceCalled('Directory', '/etc/oozie/conf/action-conf/hive',
- owner = 'oozie',
- group = 'hadoop',
- create_parents = True
- )
- self.assertResourceCalled('XmlConfig', 'hive-site',
- owner = 'oozie',
- group = 'hadoop',
- mode = 0664,
- conf_dir = '/etc/oozie/conf/action-conf/hive',
- configurations = self.getConfig()['configurations']['hive-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['hive-site']
- )
- self.assertResourceCalled('XmlConfig', 'tez-site',
- owner = 'oozie',
- group = 'hadoop',
- mode = 0664,
- conf_dir = '/etc/oozie/conf/action-conf/hive',
- configurations = self.getConfig()['configurations']['tez-site'],
- configuration_attributes = self.getConfig()['configuration_attributes']['tez-site']
- )
- self.assertNoMoreResources()
+ @patch.object(shell, "call")
+ def test_configure_default_hdp22(self, call_mocks):
+ call_mocks = MagicMock(return_value=(0, "New Oozie WAR file with added"))
+ config_file = "stacks/2.0.6/configs/default.json"
+ with open(config_file, "r") as f:
+ default_json = json.load(f)
+ default_json['hostLevelParams']['stack_version']= '2.2'
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
+ classname = "OozieServer",
+ command = "configure",
+ config_file="default.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES,
+ call_mocks = call_mocks
+ )
+ self.assert_configure_default()
@patch("resource_management.libraries.functions.security_commons.build_expectations")
@patch("resource_management.libraries.functions.security_commons.get_params_from_filesystem")
@@ -1619,4 +1609,4 @@ class TestOozieServer(RMFTestCase):
self.assertEquals(
('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'oozie', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
- mocks_dict['call'].call_args_list[0][0][0])
+ mocks_dict['call'].call_args_list[0][0][0])
\ No newline at end of file
[24/24] ambari git commit: Merge branch 'trunk' into
branch-dev-patch-upgrade
Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c543ef8b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c543ef8b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c543ef8b
Branch: refs/heads/branch-dev-patch-upgrade
Commit: c543ef8b23f75c828d87e6d0fc4866b567dd5dd7
Parents: 697c309 46bbbf9
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Feb 10 11:12:38 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Feb 10 11:12:38 2016 -0500
----------------------------------------------------------------------
.../stackVersions/StackVersionsCreateCtrl.js | 7 +-
.../main/python/ambari_agent/PythonExecutor.py | 5 +-
ambari-metrics/ambari-metrics-assembly/pom.xml | 12 +-
.../server/controller/AuthToLocalBuilder.java | 287 ++++++++++-------
.../server/controller/KerberosHelperImpl.java | 23 +-
.../internal/ServiceResourceProvider.java | 197 ++++++------
.../ambari/server/state/quicklinks/Link.java | 19 +-
.../main/python/ambari_server/serverSetup.py | 17 -
ambari-server/src/main/python/bootstrap.py | 4 +-
.../0.1.0/package/scripts/metrics_grafana.py | 2 +
.../package/scripts/metrics_grafana_util.py | 131 ++++++++
.../metrics_grafana_datasource.json.j2 | 33 ++
.../FALCON/0.5.0.2.1/quicklinks/quicklinks.json | 1 -
.../1.10.3-10/configuration/kerberos-env.xml | 14 +
.../OOZIE/4.0.0.2.0/package/scripts/oozie.py | 71 ++++-
.../4.0.0.2.0/package/scripts/oozie_server.py | 3 +-
.../package/scripts/oozie_server_upgrade.py | 18 +-
.../PXF/3.0.0/package/scripts/params.py | 17 +-
.../PXF/3.0.0/package/scripts/service_check.py | 81 +++--
.../RANGER/0.4.0/package/scripts/params.py | 2 +
.../0.4.0/package/scripts/ranger_admin.py | 50 +--
.../0.4.0/package/scripts/service_check.py | 12 +-
.../0.4.0/package/scripts/status_params.py | 27 ++
.../STORM/0.9.1.2.1/quicklinks/quicklinks.json | 1 -
.../main/resources/scripts/Ambaripreupload.py | 31 +-
.../HDP/2.0.6/services/HBASE/metainfo.xml | 7 +
.../services/HBASE/quicklinks/quicklinks.json | 97 ++++++
.../stacks/HDP/2.0.6/services/HDFS/metainfo.xml | 7 +
.../services/HDFS/quicklinks/quicklinks.json | 76 +++++
.../HDP/2.0.6/services/OOZIE/metainfo.xml | 5 +-
.../services/OOZIE/quicklinks/quicklinks.json | 1 -
.../stacks/HDP/2.0.6/services/YARN/metainfo.xml | 13 +
.../YARN/quicklinks-mapred/quicklinks.json | 76 +++++
.../services/YARN/quicklinks/quicklinks.json | 76 +++++
.../main/resources/stacks/HDP/2.1/metainfo.xml | 2 +-
.../services/RANGER/quicklinks/quicklinks.json | 1 -
.../services/RANGER/themes/theme_version_1.json | 20 +-
.../services/SPARK/quicklinks/quicklinks.json | 1 -
.../ACCUMULO/quicklinks/quicklinks.json | 1 -
.../services/ATLAS/quicklinks/quicklinks.json | 1 -
.../services/HBASE/quicklinks/quicklinks.json | 6 -
.../services/HDFS/quicklinks/quicklinks.json | 4 -
.../services/OOZIE/quicklinks/quicklinks.json | 1 -
.../services/RANGER/quicklinks/quicklinks.json | 1 -
.../services/RANGER/themes/theme_version_2.json | 40 ++-
.../services/SPARK/quicklinks/quicklinks.json | 1 -
.../YARN/quicklinks-mapred/quicklinks.json | 4 -
.../services/YARN/quicklinks/quicklinks.json | 4 -
.../controller/AuthToLocalBuilderTest.java | 315 ++++++++++++-------
.../AMBARI_METRICS/test_metrics_grafana.py | 19 +-
.../stacks/2.0.6/OOZIE/test_oozie_server.py | 172 +++++-----
.../resources/child_quicklinks_to_merge.json | 3 -
.../resources/child_quicklinks_to_override.json | 4 -
.../src/test/resources/parent_quicklinks.json | 4 -
.../assets/data/configurations/quicklinks.json | 4 -
.../app/controllers/global/update_controller.js | 8 +-
.../manage_alert_notifications_controller.js | 74 +++--
.../app/controllers/wizard/step4_controller.js | 122 +++++--
ambari-web/app/data/HDP2/site_properties.js | 7 +
ambari-web/app/router.js | 59 ++--
.../app/views/common/quick_view_link_view.js | 10 +-
ambari-web/app/views/main/dashboard/widgets.js | 5 +
...anage_alert_notifications_controller_test.js | 121 ++++---
.../test/controllers/wizard/step4_test.js | 40 ++-
.../views/common/log_file_search_view_test.js | 3 +-
.../test/views/main/dashboard/widgets_test.js | 42 ++-
66 files changed, 1730 insertions(+), 792 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c543ef8b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --cc ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index df344e6,532e5f4..931b7ec
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@@ -27,167 -29,38 +27,166 @@@ angular.module('ambariAdminConsole'
$scope.clusterName = $routeParams.clusterName;
$scope.subversionPattern = /^\d+\.\d+(-\d+)?$/;
$scope.upgradeStack = {
- selected: null,
- options: []
- };
- $scope.fetchStackVersionFilterList = function () {
- return Stack.allStackVersions()
- .then(function (allStackVersions) {
- var versions = [];
- angular.forEach(allStackVersions, function (version) {
- if (version.upgrade_packs.length > 0 && version.active) {
- versions.push(version);
- }
+ stack_name: '',
+ stack_version: '',
+ display_name: ''
+ };
+
+ $scope.option1 = {
+ index: 1,
+ displayName: 'Upload Version Definition File',
+ url: 'files://',
+ hasError: false
+ };
+ $scope.option2 = {
+ index: 2,
+ displayName: 'Version Definition File URL',
+ url: 'https://',
+ hasError: false
+ };
+ $scope.selectedOption = 1;
+
+ /**
+ * User can select ONLY one option to upload version definition file
+ */
+ $scope.toggleOptionSelect = function () {
+ $scope.option1.hasError = false;
+ $scope.option2.hasError = false;
+ };
+ $scope.clearOptionsError = function () {
+ $scope.option1.hasError = false;
+ $scope.option2.hasError = false;
+ };
+ $scope.readInfoButtonDisabled = function () {
+ return $scope.option1.selected ? !$scope.option1.url : !$scope.option2.url;
+ };
+
+ $scope.onFileSelect = function(){
+ return {
+ link: function($scope,el){
+ el.bind("change", function(e){
+ $scope.file = (e.srcElement || e.target).files[0];
+ $scope.getFile();
+ })
+ }
+ }
+ };
+
+// $scope.uploadFile = function(){
+// var file = $scope.myFile;
+// console.log('file is ' );
+// console.dir(file);
+// var uploadUrl = "/fileUpload";
+// fileUpload.uploadFileToUrl(file, uploadUrl);
+// };
+
+ /**
+ * Load selected file to current page content
+ */
+ $scope.readVersionInfo = function(){
+ if ($scope.option2.selected) {
+ var url = $scope.option2.url;
+ }
+ /// POST url first then get the version definition info
+ return Stack.getLatestRepo('HDP').then(function (response) {
+ $scope.id = response.id;
+ $scope.isPatch = response.type == 'PATCH';
+ $scope.stackNameVersion = response.stackNameVersion || 'n/a';
+ $scope.displayName = response.displayName || 'n/a';
+ $scope.version = response.version || 'n/a';
+ $scope.actualVersion = response.actualVersion || 'n/a';
+ $scope.upgradeStack = {
+ stack_name: response.stackName,
+ stack_version: response.stackVersion,
+ display_name: response.displayName
+ };
+ $scope.services = response.services || [];
+ //save default values of repos to check if they were changed
+ $scope.defaulfOSRepos = {};
+ response.updateObj.operating_systems.forEach(function(os) {
+ $scope.defaulfOSRepos[os.OperatingSystems.os_type] = {
+ defaultBaseUrl: os.repositories[0].Repositories.base_url,
+ defaultUtilsUrl: os.repositories[1].Repositories.base_url
+ };
});
- $scope.upgradeStack.options = versions;
- $scope.upgradeStack.selected = versions[versions.length - 1];
- $scope.afterStackVersionChange().then(function(){
- $scope.disableUnusedOS();
+ $scope.repoVersionFullName = response.repoVersionFullName;
+ angular.forEach(response.osList, function (os) {
+ os.selected = true;
});
- })
- .catch(function (data) {
- Alert.error($t('versions.alerts.filterListError'), data.message);
+ $scope.osList = response.osList;
+ // load supported os type base on stack version
+ $scope.afterStackVersionRead();
});
};
- $scope.fetchStackVersionFilterList();
+
+ /**
+ * Load supported OS list
+ */
+ $scope.afterStackVersionRead = function () {
+ Stack.getSupportedOSList($scope.upgradeStack.stack_name, $scope.upgradeStack.stack_version)
+ .then(function (data) {
+ var operatingSystems = data.operating_systems;
+ operatingSystems.map(function (os) {
+ var existingOSHash = {};
+ angular.forEach($scope.osList, function (os) {
+ existingOSHash[os.OperatingSystems.os_type] = os;
+ });
+ // if os not in the list, mark as un-selected, add this to the osList
+ if (!existingOSHash[os.OperatingSystems.os_type]) {
+ os.selected = false;
+ os.repositories.forEach(function(repo) {
+ repo.Repositories.base_url = '';
+ });
+ $scope.osList.push(os);
+ }
+ });
+ })
+ .catch(function (data) {
+ Alert.error($t('versions.alerts.osListError'), data.message);
+ });
+ };
+
+ /**
+ * On click handler for removing OS
+ */
+ $scope.removeOS = function() {
+ this.os.selected = false;
+ if (this.os.repositories) {
+ this.os.repositories.forEach(function(repo) {
+ repo.hasError = false;
+ });
+ }
+ };
+ /**
+ * On click handler for adding new OS
+ */
+ $scope.addOS = function() {
+ this.os.selected = true;
+ if (this.os.repositories) {
+ this.os.repositories.forEach(function(repo) {
+ repo.hasError = false;
+ });
+ }
+ };
+
+ $scope.isSaveButtonDisabled = function() {
+ var enabled = false;
+ $scope.osList.forEach(function(os) {
+ if (os.selected) {
+ enabled = true
+ }
+ });
+ return !enabled;
+ }
$scope.save = function () {
- return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack.selected).then(function (invalidUrls) {
+ return Stack.validateBaseUrls($scope.skipValidation, $scope.osList, $scope.upgradeStack).then(function (invalidUrls) {
if (invalidUrls.length === 0) {
- Stack.addRepo($scope.upgradeStack.selected, $scope.repoSubversion, $scope.osList)
+ Stack.addRepo($scope.upgradeStack, $scope.actualVersion, $scope.osList)
.success(function () {
- Alert.success($t('versions.alerts.versionCreated', {
- stackName: $scope.upgradeStack.stack_name,
- versionName: $scope.actualVersion
- }));
+ var versionName = $scope.upgradeStack.selected.stack_version + '.' + $scope.repoSubversion;
+ var stackName = $scope.upgradeStack.selected.stack_name;
+ Alert.success($t('versions.alerts.versionCreated', {stackName: stackName, versionName: versionName}));
$location.path('/stackVersions');
})
.error(function (data) {
[22/24] ambari git commit: AMBARI-14959: Implement service check for
secured PXF service (lavjain via jaoki)
Posted by nc...@apache.org.
AMBARI-14959: Implement service check for secured PXF service (lavjain via jaoki)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/feb50e3a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/feb50e3a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/feb50e3a
Branch: refs/heads/branch-dev-patch-upgrade
Commit: feb50e3a3f5edb1105780d06254bb9538d19063e
Parents: 73fbe14
Author: Jun Aoki <ja...@apache.org>
Authored: Tue Feb 9 15:33:08 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Tue Feb 9 15:33:08 2016 -0800
----------------------------------------------------------------------
.../PXF/3.0.0/package/scripts/params.py | 17 +++-
.../PXF/3.0.0/package/scripts/service_check.py | 81 +++++++++++++++-----
2 files changed, 76 insertions(+), 22 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/feb50e3a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
index 7749de7..b3e85e4 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
@@ -22,6 +22,7 @@ from resource_management import Script
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.functions.namenode_ha_utils import get_active_namenode
config = Script.get_config()
@@ -31,9 +32,10 @@ stack_name = str(config["hostLevelParams"]["stack_name"])
# Users and Groups
pxf_user = "pxf"
pxf_group = pxf_user
-hdfs_superuser = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
hdfs_superuser_group = config["configurations"]["hdfs-site"]["dfs.permissions.superusergroup"]
user_group = config["configurations"]["cluster-env"]["user_group"]
+hbase_user = default('configurations/hbase-env/hbase_user', None)
hive_user = default('configurations/hive-env/hive_user', None)
tomcat_group = "tomcat"
@@ -60,14 +62,21 @@ is_hive_installed = default("/clusterHostInfo/hive_server_host", None) is not No
# HDFS
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
+namenode_path = default('/configurations/hdfs-site/dfs.namenode.http-address', None)
+dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
+if dfs_nameservice:
+ namenode_path = get_active_namenode(hdfs_site, security_enabled, hdfs_user)[1]
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+# keytabs and principals
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+hdfs_user_keytab = default('configurations/hadoop-env/hdfs_user_keytab', None)
+hdfs_principal_name = default('configurations/hadoop-env/hdfs_principal_name', None)
+hbase_user_keytab = default('configurations/hbase-env/hbase_user_keytab', None)
+hbase_principal_name = default('configurations/hbase-env/hbase_principal_name', None)
# HDFSResource partial function
HdfsResource = functools.partial(HdfsResource,
- user=hdfs_superuser,
+ user=hdfs_user,
security_enabled=security_enabled,
keytab=hdfs_user_keytab,
kinit_path_local=kinit_path_local,
http://git-wip-us.apache.org/repos/asf/ambari/blob/feb50e3a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
index 064be04..21b7c5d 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/service_check.py
@@ -15,15 +15,19 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
+import json
+
from resource_management.libraries.script import Script
from resource_management.core.exceptions import Fail
from resource_management.core.logger import Logger
from resource_management.core.system import System
from resource_management.core.resources.system import Execute
-
+from resource_management.core.environment import Environment
+from resource_management.libraries.functions.curl_krb_request import curl_krb_request
from pxf_utils import makeHTTPCall, runLocalCmd
import pxf_constants
+
class PXFServiceCheck(Script):
"""
Runs a set of simple PXF tests to verify if the service has been setup correctly
@@ -44,11 +48,19 @@ class PXFServiceCheck(Script):
def service_check(self, env):
- Logger.info("Starting PXF service checks..")
-
+ """
+ Runs the service check for PXF
+ """
import params
- self.pxf_version = self.__get_pxf_protocol_version()
+ Logger.info("Starting PXF service checks..")
try:
+ # Get delegation token if security is enabled
+ if params.security_enabled:
+ token = self.__get_delegation_token(params.hdfs_user, params.hdfs_user_keytab,
+ params.hdfs_principal_name, params.kinit_path_local)
+ self.commonPXFHeaders.update({"X-GP-TOKEN": token})
+
+ self.pxf_version = self.__get_pxf_protocol_version()
self.run_hdfs_tests()
if params.is_hbase_installed:
self.run_hbase_tests()
@@ -59,7 +71,10 @@ class PXFServiceCheck(Script):
Logger.error(msg)
raise Fail(msg)
finally:
- self.cleanup_test_data()
+ try:
+ self.cleanup_test_data()
+ except Exception as e:
+ Logger.error(e)
Logger.info("Service check completed successfully")
@@ -111,9 +126,28 @@ class PXFServiceCheck(Script):
raise
except:
msg = "PXF data read failed"
+ Logger.error(msg)
raise Fail(msg)
+ def __get_delegation_token(self, user, keytab, principal, kinit_path):
+ """
+ Gets the kerberos delegation token from name node
+ """
+ import params
+ url = params.namenode_path + "/webhdfs/v1/?op=GETDELEGATIONTOKEN"
+ Logger.info("Getting delegation token from {0}".format(url))
+ response, _, _ = curl_krb_request(Environment.get_instance().tmp_dir, keytab, principal,
+ url, "get_delegation_token", kinit_path, False, "Delegation Token", user)
+ json_response = json.loads(response)
+ if json_response['Token'] and json_response['Token']['urlString']:
+ return json_response['Token']['urlString']
+
+ msg = "Unable to get delegation token"
+ Logger.error(msg)
+ raise Fail(msg)
+
+
# HDFS Routines
def run_hdfs_tests(self):
"""
@@ -136,20 +170,20 @@ class PXFServiceCheck(Script):
type="directory",
action="create_on_execute",
mode=0777
- )
-
+ )
params.HdfsResource(pxf_constants.pxf_hdfs_read_test_file,
type="file",
source="/etc/passwd",
action="create_on_execute"
- )
+ )
+ params.HdfsResource(None, action="execute")
def __check_pxf_hdfs_read(self):
"""
Reads the test HDFS data through PXF
"""
Logger.info("Testing PXF HDFS read")
- headers = {
+ headers = {
"X-GP-DATA-DIR": pxf_constants.pxf_hdfs_test_dir,
"X-GP-profile": "HdfsTextSimple",
}
@@ -182,6 +216,7 @@ class PXFServiceCheck(Script):
raise
except:
msg = "PXF HDFS data write test failed"
+ Logger.error(msg)
raise Fail(msg)
def __cleanup_hdfs_data(self):
@@ -193,11 +228,12 @@ class PXFServiceCheck(Script):
params.HdfsResource(pxf_constants.pxf_hdfs_read_test_file,
type="file",
action="delete_on_execute"
- )
+ )
params.HdfsResource(pxf_constants.pxf_hdfs_test_dir,
type="directory",
action="delete_on_execute"
- )
+ )
+ params.HdfsResource(None, action="execute")
# HBase Routines
@@ -205,7 +241,11 @@ class PXFServiceCheck(Script):
"""
Runs a set of PXF HBase checks
"""
+ import params
Logger.info("Running PXF HBase checks")
+ if params.security_enabled:
+ Execute("{0} -kt {1} {2}".format(params.kinit_path_local, params.hbase_user_keytab, params.hbase_principal_name),
+ user = params.hbase_user)
self.__cleanup_hbase_data()
self.__check_if_client_exists("HBase")
self.__write_hbase_data()
@@ -215,9 +255,12 @@ class PXFServiceCheck(Script):
"""
Creates a temporary HBase table for the service checks
"""
+ import params
Logger.info("Creating temporary HBase test data")
- Execute("echo \"create '" + pxf_constants.pxf_hbase_test_table + "', 'cf'\"|hbase shell", logoutput = True)
- Execute("echo \"put '" + pxf_constants.pxf_hbase_test_table + "', 'row1', 'cf:a', 'value1'; put '" + pxf_constants.pxf_hbase_test_table + "', 'row1', 'cf:b', 'value2'\" | hbase shell", logoutput = True)
+ cmd = "echo \"create '{0}', 'cf'\" | hbase shell".format(pxf_constants.pxf_hbase_test_table)
+ Execute(cmd, logoutput = True, user = params.hbase_user)
+ cmd = "echo \"put '{0}', 'row1', 'cf:a', 'value1'; put '{0}', 'row1', 'cf:b', 'value2'\" | hbase shell".format(pxf_constants.pxf_hbase_test_table)
+ Execute(cmd, logoutput = True, user = params.hbase_user)
def __check_pxf_hbase_read(self):
"""
@@ -229,16 +272,18 @@ class PXFServiceCheck(Script):
"X-GP-profile": "HBase",
}
headers.update(self.commonPXFHeaders)
-
self.__check_pxf_read(headers)
def __cleanup_hbase_data(self):
"""
Cleans up the test HBase data
"""
+ import params
Logger.info("Cleaning up HBase test data")
- Execute("echo \"disable '" + pxf_constants.pxf_hbase_test_table + "'\"|hbase shell > /dev/null 2>&1", logoutput = True)
- Execute("echo \"drop '" + pxf_constants.pxf_hbase_test_table + "'\"|hbase shell > /dev/null 2>&1", logoutput = True)
+ cmd = "echo \"disable '{0}'\" | hbase shell > /dev/null 2>&1".format(pxf_constants.pxf_hbase_test_table)
+ Execute(cmd, logoutput = True, user = params.hbase_user)
+ cmd = "echo \"drop '{0}'\" | hbase shell > /dev/null 2>&1".format(pxf_constants.pxf_hbase_test_table)
+ Execute(cmd, logoutput = True, user = params.hbase_user)
# Hive Routines
@@ -259,7 +304,7 @@ class PXFServiceCheck(Script):
import params
Logger.info("Creating temporary Hive test data")
cmd = "hive -e 'CREATE TABLE IF NOT EXISTS {0} (id INT); INSERT INTO {0} VALUES (1);'".format(pxf_constants.pxf_hive_test_table)
- Execute(cmd, logoutput = True, user = params.hive_user)
+ Execute(cmd, logoutput = True, user = params.hdfs_user)
def __check_pxf_hive_read(self):
"""
@@ -280,7 +325,7 @@ class PXFServiceCheck(Script):
import params
Logger.info("Cleaning up Hive test data")
cmd = "hive -e 'DROP TABLE IF EXISTS {0};'".format(pxf_constants.pxf_hive_test_table)
- Execute(cmd, logoutput = True, user = params.hive_user)
+ Execute(cmd, logoutput = True, user = params.hdfs_user)
# Package Routines
[02/24] ambari git commit: AMBARI-14403: Improve Quicklink logic to
remove any unnecessary legacy code (dili)
Posted by nc...@apache.org.
AMBARI-14403: Improve Quicklink logic to remove any unnecessary legacy code (dili)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5cb0fadd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5cb0fadd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5cb0fadd
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 5cb0faddaee06332ab703c237a7514742fbc3588
Parents: 62f3a3b
Author: Di Li <di...@apache.org>
Authored: Mon Feb 8 11:30:26 2016 -0500
Committer: Di Li <di...@apache.org>
Committed: Mon Feb 8 11:30:26 2016 -0500
----------------------------------------------------------------------
.../ambari/server/state/quicklinks/Link.java | 19 +---
.../FALCON/0.5.0.2.1/quicklinks/quicklinks.json | 1 -
.../STORM/0.9.1.2.1/quicklinks/quicklinks.json | 1 -
.../HDP/2.0.6/services/HBASE/metainfo.xml | 7 ++
.../services/HBASE/quicklinks/quicklinks.json | 97 ++++++++++++++++++++
.../stacks/HDP/2.0.6/services/HDFS/metainfo.xml | 7 ++
.../services/HDFS/quicklinks/quicklinks.json | 76 +++++++++++++++
.../HDP/2.0.6/services/OOZIE/metainfo.xml | 5 +-
.../services/OOZIE/quicklinks/quicklinks.json | 1 -
.../stacks/HDP/2.0.6/services/YARN/metainfo.xml | 13 +++
.../YARN/quicklinks-mapred/quicklinks.json | 76 +++++++++++++++
.../services/YARN/quicklinks/quicklinks.json | 76 +++++++++++++++
.../services/RANGER/quicklinks/quicklinks.json | 1 -
.../services/SPARK/quicklinks/quicklinks.json | 1 -
.../ACCUMULO/quicklinks/quicklinks.json | 1 -
.../services/ATLAS/quicklinks/quicklinks.json | 1 -
.../services/HBASE/quicklinks/quicklinks.json | 6 --
.../services/HDFS/quicklinks/quicklinks.json | 4 -
.../services/OOZIE/quicklinks/quicklinks.json | 1 -
.../services/RANGER/quicklinks/quicklinks.json | 1 -
.../services/SPARK/quicklinks/quicklinks.json | 1 -
.../YARN/quicklinks-mapred/quicklinks.json | 4 -
.../services/YARN/quicklinks/quicklinks.json | 4 -
.../resources/child_quicklinks_to_merge.json | 3 -
.../resources/child_quicklinks_to_override.json | 4 -
.../src/test/resources/parent_quicklinks.json | 4 -
.../assets/data/configurations/quicklinks.json | 4 -
.../app/views/common/quick_view_link_view.js | 10 +-
28 files changed, 360 insertions(+), 69 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java b/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java
index a0890f1..8800b3a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/quicklinks/Link.java
@@ -37,9 +37,6 @@ public class Link{
@JsonProperty("url")
private String url;
- @JsonProperty("template")
- private String template;
-
@JsonProperty("port")
private Port port;
@@ -67,14 +64,6 @@ public class Link{
this.url = url;
}
- public String getTemplate() {
- return template;
- }
-
- public void setTemplate(String template) {
- this.template = template;
- }
-
public String getRequiresUserName() {
return requiresUserName;
}
@@ -93,7 +82,7 @@ public class Link{
public boolean isRemoved(){
//treat a link as removed if the section only contains a name
- return (null == port && null == url && null == template && null == label && null == requiresUserName);
+ return (null == port && null == url && null == label && null == requiresUserName);
}
public void mergeWithParent(Link parentLink) {
@@ -103,18 +92,12 @@ public class Link{
/* merge happens when a child link has some infor but not all of them.
* If a child link has nothing but a name, it's treated as being removed from the link list
*/
- if(null == template && null != parentLink.getTemplate())
- template = parentLink.getTemplate();
-
if(null == label && null != parentLink.getLabel())
label = parentLink.getLabel();
if(null == url && null != parentLink.getUrl())
url = parentLink.getUrl();
- if(null == template && null != parentLink.getTemplate())
- template = parentLink.getTemplate();
-
if(null == requiresUserName && null != parentLink.getRequiresUserName())
requiresUserName = parentLink.getRequiresUserName();
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json
index bbea282..54acb76 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json
@@ -13,7 +13,6 @@
"label": "Falcon Web UI",
"requires_user_name": "true",
"url":"%@://%@:%@/index.html?user.name=%@",
- "template":"%@://%@:%@/index.html?user.name=%@",
"port":{
"http_property": "falcon_port",
"http_default_port": "15000",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/quicklinks/quicklinks.json b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/quicklinks/quicklinks.json
index dd17275..b6281f7 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/quicklinks/quicklinks.json
@@ -13,7 +13,6 @@
"label": "Storm UI",
"requires_user_name": "false",
"url":"%@://%@:%@/",
- "template":"%@://%@:%@/",
"port":{
"http_property": "ui.port",
"http_default_port": "8744",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
index 194f79e..25303cd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/metainfo.xml
@@ -21,6 +21,13 @@
<service>
<name>HBASE</name>
<extends>common-services/HBASE/0.96.0.2.0</extends>
+
+ <quickLinksConfigurations>
+ <quickLinksConfiguration>
+ <fileName>quicklinks.json</fileName>
+ <default>true</default>
+ </quickLinksConfiguration>
+ </quickLinksConfigurations>
</service>
</services>
</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/quicklinks/quicklinks.json
new file mode 100644
index 0000000..5568122
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/quicklinks/quicklinks.json
@@ -0,0 +1,97 @@
+{
+ "name": "default",
+ "description": "default quick links configuration",
+ "configuration": {
+ "protocol":
+ {
+ "type":"http"
+ },
+
+ "links": [
+ {
+ "name": "hbase_master_ui",
+ "label": "HBase Master UI",
+ "url":"%@://%@:%@/master-status",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "hbase.master.info.port",
+ "http_default_port": "60010",
+ "https_property": "hbase.master.info.port",
+ "https_default_port": "60443",
+ "regex": "",
+ "site": "hbase-site"
+ }
+ },
+ {
+ "name": "hbase_logs",
+ "label": "HBase Logs",
+ "url":"%@://%@:%@/logs",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "hbase.master.info.port",
+ "http_default_port": "60010",
+ "https_property": "hbase.master.info.port",
+ "https_default_port": "60443",
+ "regex": "",
+ "site": "hbase-site"
+ }
+ },
+ {
+ "name": "zookeeper_info",
+ "label": "Zookeeper Info",
+ "url":"%@://%@:%@/zk.jsp",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "hbase.master.info.port",
+ "http_default_port": "60010",
+ "https_property": "hbase.master.info.port",
+ "https_default_port": "60443",
+ "regex": "",
+ "site": "hbase-site"
+ }
+ },
+ {
+ "name": "hbase_master_jmx",
+ "label": "HBase Master JMX",
+ "url":"%@://%@:%@/jmx",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "hbase.master.info.port",
+ "http_default_port": "60010",
+ "https_property": "hbase.master.info.port",
+ "https_default_port": "60443",
+ "regex": "",
+ "site": "hbase-site"
+ }
+ },
+ {
+ "name": "debug_dump",
+ "label": "Debug Dump",
+ "url":"%@://%@:%@/dump",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "hbase.master.info.port",
+ "http_default_port": "60010",
+ "https_property": "hbase.master.info.port",
+ "https_default_port": "60443",
+ "regex": "",
+ "site": "hbase-site"
+ }
+ },
+ {
+ "name": "thread_stacks",
+ "label": "Thread Stacks",
+ "url":"%@://%@:%@/stacks",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "hbase.master.info.port",
+ "http_default_port": "60010",
+ "https_property": "hbase.master.info.port",
+ "https_default_port": "60443",
+ "regex": "",
+ "site": "hbase-site"
+ }
+ }
+ ]
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
index a5d6862..410ff95 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/metainfo.xml
@@ -21,6 +21,13 @@
<service>
<name>HDFS</name>
<extends>common-services/HDFS/2.1.0.2.0</extends>
+
+ <quickLinksConfigurations>
+ <quickLinksConfiguration>
+ <fileName>quicklinks.json</fileName>
+ <default>true</default>
+ </quickLinksConfiguration>
+ </quickLinksConfigurations>
</service>
</services>
</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/quicklinks/quicklinks.json
new file mode 100644
index 0000000..a4216e3
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/quicklinks/quicklinks.json
@@ -0,0 +1,76 @@
+{
+ "name": "default",
+ "description": "default quick links configuration",
+ "configuration": {
+ "protocol":
+ {
+ "type":"https",
+ "checks":[
+ {
+ "property":"dfs.http.policy",
+ "desired":"HTTPS_ONLY",
+ "site":"hdfs-site"
+ }
+ ]
+ },
+
+ "links": [
+ {
+ "name": "namenode_ui",
+ "label": "NameNode UI",
+ "url":"%@://%@:%@",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "dfs.namenode.http-address",
+ "http_default_port": "50070",
+ "https_property": "dfs.namenode.https-address",
+ "https_default_port": "50470",
+ "regex": "\\w*:(\\d+)",
+ "site": "hdfs-site"
+ }
+ },
+ {
+ "name": "namenode_logs",
+ "label": "NameNode Logs",
+ "url":"%@://%@:%@/logs",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "dfs.namenode.http-address",
+ "http_default_port": "50070",
+ "https_property": "dfs.namenode.https-address",
+ "https_default_port": "50470",
+ "regex": "\\w*:(\\d+)",
+ "site": "hdfs-site"
+ }
+ },
+ {
+ "name": "namenode_jmx",
+ "label": "NameNode JMX",
+ "url":"%@://%@:%@/jmx",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "dfs.namenode.http-address",
+ "http_default_port": "50070",
+ "https_property": "dfs.namenode.https-address",
+ "https_default_port": "50470",
+ "regex": "\\w*:(\\d+)",
+ "site": "hdfs-site"
+ }
+ },
+ {
+ "name": "Thread Stacks",
+ "label": "Thread Stacks",
+ "url":"%@://%@:%@/stacks",
+ "requires_user_name": "false",
+ "port":{
+ "http_property": "dfs.namenode.http-address",
+ "http_default_port": "50070",
+ "https_property": "dfs.namenode.https-address",
+ "https_default_port": "50470",
+ "regex": "\\w*:(\\d+)",
+ "site": "hdfs-site"
+ }
+ }
+ ]
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
index 8b24091..90e1cc5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/metainfo.xml
@@ -21,13 +21,12 @@
<service>
<name>OOZIE</name>
<extends>common-services/OOZIE/4.0.0.2.0</extends>
- </service>
-
- <quickLinksConfigurations>
+ <quickLinksConfigurations>
<quickLinksConfiguration>
<fileName>quicklinks.json</fileName>
<default>true</default>
</quickLinksConfiguration>
</quickLinksConfigurations>
+ </service>
</services>
</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/quicklinks/quicklinks.json
index e7c23a4..621fe8a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/quicklinks/quicklinks.json
@@ -30,7 +30,6 @@
"label": "Oozie Web UI",
"requires_user_name": "true",
"url":"%@://%@:%@/oozie?user.name=%@",
- "template":"%@://%@:%@/oozie?user.name=%@",
"port":{
"http_property": "oozie.base.url",
"http_default_port": "11000",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
index 7a1c715..59524be 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/metainfo.xml
@@ -22,10 +22,23 @@
<service>
<name>YARN</name>
<extends>common-services/YARN/2.1.0.2.0</extends>
+ <quickLinksConfigurations>
+ <quickLinksConfiguration>
+ <fileName>quicklinks.json</fileName>
+ <default>true</default>
+ </quickLinksConfiguration>
+ </quickLinksConfigurations>
</service>
<service>
<name>MAPREDUCE2</name>
<extends>common-services/MAPREDUCE2/2.1.0.2.0.6.0</extends>
+ <quickLinksConfigurations-dir>quicklinks-mapred</quickLinksConfigurations-dir>
+ <quickLinksConfigurations>
+ <quickLinksConfiguration>
+ <fileName>quicklinks.json</fileName>
+ <default>true</default>
+ </quickLinksConfiguration>
+ </quickLinksConfigurations>
</service>
</services>
</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/quicklinks-mapred/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/quicklinks-mapred/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/quicklinks-mapred/quicklinks.json
new file mode 100644
index 0000000..36f71b5
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/quicklinks-mapred/quicklinks.json
@@ -0,0 +1,76 @@
+{
+ "name": "default",
+ "description": "default quick links configuration",
+ "configuration": {
+ "protocol":
+ {
+ "type":"https",
+ "checks":[
+ {
+ "property":"mapreduce.jobhistory.http.policy",
+ "desired":"HTTPS_ONLY",
+ "site":"mapred-site"
+ }
+ ]
+ },
+
+ "links": [
+ {
+ "name": "jobhistory_ui",
+ "label": "JobHistory UI",
+ "requires_user_name": "false",
+ "url": "%@://%@:%@",
+ "port":{
+ "http_property": "mapreduce.jobhistory.webapp.address",
+ "http_default_port": "19888",
+ "https_property": "mapreduce.jobhistory.webapp.https.address",
+ "https_default_port": "8090",
+ "regex": "\\w*:(\\d+)",
+ "site": "mapred-site"
+ }
+ },
+ {
+ "name": "jobhistory_logs",
+ "label": "JobHistory logs",
+ "requires_user_name": "false",
+ "url": "%@://%@:%@/logs",
+ "port":{
+ "http_property": "mapreduce.jobhistory.webapp.address",
+ "http_default_port": "19888",
+ "https_property": "mapreduce.jobhistory.webapp.https.address",
+ "https_default_port": "8090",
+ "regex": "\\w*:(\\d+)",
+ "site": "mapred-site"
+ }
+ },
+ {
+ "name":"jobhistory_jmx",
+ "label":"JobHistory JMX",
+ "requires_user_name":"false",
+ "url":"%@://%@:%@/jmx",
+ "port":{
+ "http_property": "mapreduce.jobhistory.webapp.address",
+ "http_default_port": "19888",
+ "https_property": "mapreduce.jobhistory.webapp.https.address",
+ "https_default_port": "8090",
+ "regex": "\\w*:(\\d+)",
+ "site": "mapred-site"
+ }
+ },
+ {
+ "name":"thread_stacks",
+ "label":"Thread Stacks",
+ "requires_user_name": "false",
+ "url":"%@://%@:%@/stacks",
+ "port":{
+ "http_property": "mapreduce.jobhistory.webapp.address",
+ "http_default_port": "19888",
+ "https_property": "mapreduce.jobhistory.webapp.https.address",
+ "https_default_port": "8090",
+ "regex": "\\w*:(\\d+)",
+ "site": "mapred-site"
+ }
+ }
+ ]
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/quicklinks/quicklinks.json
new file mode 100644
index 0000000..101be6d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/quicklinks/quicklinks.json
@@ -0,0 +1,76 @@
+{
+ "name": "default",
+ "description": "default quick links configuration",
+ "configuration": {
+ "protocol":
+ {
+ "type":"https",
+ "checks":[
+ {
+ "property":"yarn.http.policy",
+ "desired":"HTTPS_ONLY",
+ "site":"yarn-site"
+ }
+ ]
+ },
+
+ "links": [
+ {
+ "name": "resourcemanager_ui",
+ "label": "ResourceManager UI",
+ "requires_user_name": "false",
+ "url": "%@://%@:%@",
+ "port":{
+ "http_property": "yarn.timeline-service.webapp.address",
+ "http_default_port": "8088",
+ "https_property": "yarn.timeline-service.webapp.https.address",
+ "https_default_port": "8090",
+ "regex": "\\w*:(\\d+)",
+ "site": "yarn-site"
+ }
+ },
+ {
+ "name": "resourcemanager_logs",
+ "label": "ResourceManager logs",
+ "requires_user_name": "false",
+ "url": "%@://%@:%@/logs",
+ "port":{
+ "http_property": "yarn.timeline-service.webapp.address",
+ "http_default_port": "8088",
+ "https_property": "yarn.timeline-service.webapp.https.address",
+ "https_default_port": "8090",
+ "regex": "\\w*:(\\d+)",
+ "site": "yarn-site"
+ }
+ },
+ {
+ "name": "resourcemanager_jmx",
+ "label":"ResourceManager JMX",
+ "requires_user_name": "false",
+ "url":"%@://%@:%@/jmx",
+ "port":{
+ "http_property": "yarn.timeline-service.webapp.address",
+ "http_default_port": "8088",
+ "https_property": "yarn.timeline-service.webapp.https.address",
+ "https_default_port": "8090",
+ "regex": "\\w*:(\\d+)",
+ "site": "yarn-site"
+ }
+ },
+ {
+ "name": "thread_stacks",
+ "label":"Thread Stacks",
+ "requires_user_name": "false",
+ "url":"%@://%@:%@/stacks",
+ "port":{
+ "http_property": "yarn.timeline-service.webapp.address",
+ "http_default_port": "8088",
+ "https_property": "yarn.timeline-service.webapp.https.address",
+ "https_default_port": "8090",
+ "regex": "\\w*:(\\d+)",
+ "site": "yarn-site"
+ }
+ }
+ ]
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/quicklinks/quicklinks.json
index 7e2ba25..42611d6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/quicklinks/quicklinks.json
@@ -20,7 +20,6 @@
"label": "Ranger Admin UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "http.service.port",
"http_default_port": "6080",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json
index c3e51e6..685665a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/SPARK/quicklinks/quicklinks.json
@@ -13,7 +13,6 @@
"label": "Spark History Server UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "spark.history.ui.port",
"http_default_port": "18080",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/quicklinks/quicklinks.json
index 21c5e54..36e67c2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/quicklinks/quicklinks.json
@@ -25,7 +25,6 @@
"label": "Accumulo Monitor UI",
"requires_user_name": "false",
"url": "%@://%@:%@/",
- "template": "%@://%@:%@/",
"port":{
"http_property": "monitor.port.client",
"http_default_port": "50095",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/quicklinks/quicklinks.json
index dd67ec0..37e95cf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ATLAS/quicklinks/quicklinks.json
@@ -20,7 +20,6 @@
"label": "Atlas Dashboard",
"requires_user_name": "true",
"url": "%@://%@:%@/#!/search?user.name=%@",
- "template": "%@://%@:%@/#!/search?user.name=%@",
"port":{
"http_property": "atlas.server.http.port",
"http_default_port": "21000",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/quicklinks/quicklinks.json
index b52af55..5568122 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/quicklinks/quicklinks.json
@@ -13,7 +13,6 @@
"label": "HBase Master UI",
"url":"%@://%@:%@/master-status",
"requires_user_name": "false",
- "template":"%@://%@:%@/master-status",
"port":{
"http_property": "hbase.master.info.port",
"http_default_port": "60010",
@@ -28,7 +27,6 @@
"label": "HBase Logs",
"url":"%@://%@:%@/logs",
"requires_user_name": "false",
- "template":"%@://%@:%@/logs",
"port":{
"http_property": "hbase.master.info.port",
"http_default_port": "60010",
@@ -43,7 +41,6 @@
"label": "Zookeeper Info",
"url":"%@://%@:%@/zk.jsp",
"requires_user_name": "false",
- "template":"%@://%@:%@/zk.jsp",
"port":{
"http_property": "hbase.master.info.port",
"http_default_port": "60010",
@@ -58,7 +55,6 @@
"label": "HBase Master JMX",
"url":"%@://%@:%@/jmx",
"requires_user_name": "false",
- "template":"%@://%@:%@/jmx",
"port":{
"http_property": "hbase.master.info.port",
"http_default_port": "60010",
@@ -73,7 +69,6 @@
"label": "Debug Dump",
"url":"%@://%@:%@/dump",
"requires_user_name": "false",
- "template":"%@://%@:%@/dump",
"port":{
"http_property": "hbase.master.info.port",
"http_default_port": "60010",
@@ -88,7 +83,6 @@
"label": "Thread Stacks",
"url":"%@://%@:%@/stacks",
"requires_user_name": "false",
- "template":"%@://%@:%@/stacks",
"port":{
"http_property": "hbase.master.info.port",
"http_default_port": "60010",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/quicklinks/quicklinks.json
index 9fb97d0..a4216e3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/quicklinks/quicklinks.json
@@ -20,7 +20,6 @@
"label": "NameNode UI",
"url":"%@://%@:%@",
"requires_user_name": "false",
- "template":"%@://%@:%@",
"port":{
"http_property": "dfs.namenode.http-address",
"http_default_port": "50070",
@@ -35,7 +34,6 @@
"label": "NameNode Logs",
"url":"%@://%@:%@/logs",
"requires_user_name": "false",
- "template":"%@://%@:%@/logs",
"port":{
"http_property": "dfs.namenode.http-address",
"http_default_port": "50070",
@@ -50,7 +48,6 @@
"label": "NameNode JMX",
"url":"%@://%@:%@/jmx",
"requires_user_name": "false",
- "template":"%@://%@:%@/jmx",
"port":{
"http_property": "dfs.namenode.http-address",
"http_default_port": "50070",
@@ -65,7 +62,6 @@
"label": "Thread Stacks",
"url":"%@://%@:%@/stacks",
"requires_user_name": "false",
- "template":"%@://%@:%@/stacks",
"port":{
"http_property": "dfs.namenode.http-address",
"http_default_port": "50070",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/quicklinks/quicklinks.json
index e7c23a4..621fe8a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/OOZIE/quicklinks/quicklinks.json
@@ -30,7 +30,6 @@
"label": "Oozie Web UI",
"requires_user_name": "true",
"url":"%@://%@:%@/oozie?user.name=%@",
- "template":"%@://%@:%@/oozie?user.name=%@",
"port":{
"http_property": "oozie.base.url",
"http_default_port": "11000",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/quicklinks/quicklinks.json
index 74ea153..795e464 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/quicklinks/quicklinks.json
@@ -25,7 +25,6 @@
"label": "Ranger Admin UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "http.service.port",
"http_default_port": "6080",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json
index c3e51e6..685665a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/SPARK/quicklinks/quicklinks.json
@@ -13,7 +13,6 @@
"label": "Spark History Server UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "spark.history.ui.port",
"http_default_port": "18080",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks-mapred/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks-mapred/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks-mapred/quicklinks.json
index 993267d..36f71b5 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks-mapred/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks-mapred/quicklinks.json
@@ -20,7 +20,6 @@
"label": "JobHistory UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
@@ -35,7 +34,6 @@
"label": "JobHistory logs",
"requires_user_name": "false",
"url": "%@://%@:%@/logs",
- "template": "%@://%@:%@/logs",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
@@ -50,7 +48,6 @@
"label":"JobHistory JMX",
"requires_user_name":"false",
"url":"%@://%@:%@/jmx",
- "template":"%@://%@/jmx",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
@@ -65,7 +62,6 @@
"label":"Thread Stacks",
"requires_user_name": "false",
"url":"%@://%@:%@/stacks",
- "template":"%@://%@/stacks",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks/quicklinks.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks/quicklinks.json
index 3cf4c7f..101be6d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks/quicklinks.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/quicklinks/quicklinks.json
@@ -20,7 +20,6 @@
"label": "ResourceManager UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "yarn.timeline-service.webapp.address",
"http_default_port": "8088",
@@ -35,7 +34,6 @@
"label": "ResourceManager logs",
"requires_user_name": "false",
"url": "%@://%@:%@/logs",
- "template": "%@://%@:%@/logs",
"port":{
"http_property": "yarn.timeline-service.webapp.address",
"http_default_port": "8088",
@@ -50,7 +48,6 @@
"label":"ResourceManager JMX",
"requires_user_name": "false",
"url":"%@://%@:%@/jmx",
- "template":"%@://%@:%@/jmx",
"port":{
"http_property": "yarn.timeline-service.webapp.address",
"http_default_port": "8088",
@@ -65,7 +62,6 @@
"label":"Thread Stacks",
"requires_user_name": "false",
"url":"%@://%@:%@/stacks",
- "template":"%@://%@:%@/stacks",
"port":{
"http_property": "yarn.timeline-service.webapp.address",
"http_default_port": "8088",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/test/resources/child_quicklinks_to_merge.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/child_quicklinks_to_merge.json b/ambari-server/src/test/resources/child_quicklinks_to_merge.json
index 819c400..1fb09fd 100644
--- a/ambari-server/src/test/resources/child_quicklinks_to_merge.json
+++ b/ambari-server/src/test/resources/child_quicklinks_to_merge.json
@@ -20,7 +20,6 @@
"label": "JobHistory UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
@@ -35,7 +34,6 @@
"label": "JobHistory logs",
"requires_user_name": "false",
"url": "%@://%@:%@/logs",
- "template": "%@://%@:%@/logs",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
@@ -50,7 +48,6 @@
"label":"JobHistory JMX",
"requires_user_name":"false",
"url":"%@://%@:%@/jmx",
- "template":"%@://%@/jmx",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/test/resources/child_quicklinks_to_override.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/child_quicklinks_to_override.json b/ambari-server/src/test/resources/child_quicklinks_to_override.json
index b042f98..4309532 100644
--- a/ambari-server/src/test/resources/child_quicklinks_to_override.json
+++ b/ambari-server/src/test/resources/child_quicklinks_to_override.json
@@ -30,7 +30,6 @@
"label": "JobHistory UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
@@ -45,7 +44,6 @@
"label": "JobHistory logs",
"requires_user_name": "false",
"url": "%@://%@:%@/logs",
- "template": "%@://%@:%@/logs",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
@@ -60,7 +58,6 @@
"label":"JobHistory JMX",
"requires_user_name":"false",
"url":"%@://%@:%@/jmx",
- "template":"%@://%@/jmx",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
@@ -75,7 +72,6 @@
"label":"Thread Stacks",
"requires_user_name": "false",
"url":"%@://%@:%@/stacks",
- "template":"%@://%@/stacks",
"port":{
"http_property": "mapreduce.jobhistory.webapp.address",
"http_default_port": "19888",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-server/src/test/resources/parent_quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/parent_quicklinks.json b/ambari-server/src/test/resources/parent_quicklinks.json
index 3cf4c7f..101be6d 100644
--- a/ambari-server/src/test/resources/parent_quicklinks.json
+++ b/ambari-server/src/test/resources/parent_quicklinks.json
@@ -20,7 +20,6 @@
"label": "ResourceManager UI",
"requires_user_name": "false",
"url": "%@://%@:%@",
- "template": "%@://%@:%@",
"port":{
"http_property": "yarn.timeline-service.webapp.address",
"http_default_port": "8088",
@@ -35,7 +34,6 @@
"label": "ResourceManager logs",
"requires_user_name": "false",
"url": "%@://%@:%@/logs",
- "template": "%@://%@:%@/logs",
"port":{
"http_property": "yarn.timeline-service.webapp.address",
"http_default_port": "8088",
@@ -50,7 +48,6 @@
"label":"ResourceManager JMX",
"requires_user_name": "false",
"url":"%@://%@:%@/jmx",
- "template":"%@://%@:%@/jmx",
"port":{
"http_property": "yarn.timeline-service.webapp.address",
"http_default_port": "8088",
@@ -65,7 +62,6 @@
"label":"Thread Stacks",
"requires_user_name": "false",
"url":"%@://%@:%@/stacks",
- "template":"%@://%@:%@/stacks",
"port":{
"http_property": "yarn.timeline-service.webapp.address",
"http_default_port": "8088",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-web/app/assets/data/configurations/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/data/configurations/quicklinks.json b/ambari-web/app/assets/data/configurations/quicklinks.json
index 505a619..e5873ab 100644
--- a/ambari-web/app/assets/data/configurations/quicklinks.json
+++ b/ambari-web/app/assets/data/configurations/quicklinks.json
@@ -29,7 +29,6 @@
"name" : "resourcemanager_ui",
"label" : "ResourceManager UI",
"url" : "%@://%@:%@",
- "template" : "%@://%@:%@",
"port" : {
"regex" : "\\w*:(\\d+)",
"site" : "yarn-site",
@@ -46,7 +45,6 @@
"name" : "resourcemanager_logs",
"label" : "ResourceManager logs",
"url" : "%@://%@:%@/logs",
- "template" : "%@://%@:%@/logs",
"port" : {
"regex" : "\\w*:(\\d+)",
"site" : "yarn-site",
@@ -63,7 +61,6 @@
"name" : "resourcemanager_jmx",
"label" : "ResourceManager JMX",
"url" : "%@://%@:%@/jmx",
- "template" : "%@://%@:%@/jmx",
"port" : {
"regex" : "\\w*:(\\d+)",
"site" : "yarn-site",
@@ -80,7 +77,6 @@
"name" : "thread_stacks",
"label" : "Thread Stacks",
"url" : "%@://%@:%@/stacks",
- "template" : "%@://%@:%@/stacks",
"port" : {
"regex" : "\\w*:(\\d+)",
"site" : "yarn-site",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5cb0fadd/ambari-web/app/views/common/quick_view_link_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/quick_view_link_view.js b/ambari-web/app/views/common/quick_view_link_view.js
index 567f65c..1eef254 100644
--- a/ambari-web/app/views/common/quick_view_link_view.js
+++ b/ambari-web/app/views/common/quick_view_link_view.js
@@ -278,8 +278,8 @@ App.QuickViewLinks = Em.View.extend({
toAddLink: function(link){
var linkRemoved = Em.get(link, 'removed');
- var template = Em.get(link, 'template');
- return (template && !linkRemoved);
+ var url = Em.get(link, 'url');
+ return (url && !linkRemoved);
},
getHostLink: function(link, host, protocol, configProperties, response){
@@ -302,7 +302,7 @@ App.QuickViewLinks = Em.View.extend({
if (this.toAddLink(link)) {
var newItem = {};
var requiresUserName = Em.get(link, 'requires_user_name');
- var template = Em.get(link, 'template');
+ var template = Em.get(link, 'url');
if('true' === requiresUserName){
newItem.url = template.fmt(protocol, host, linkPort, App.router.get('loginName'));
} else {
@@ -377,8 +377,8 @@ App.QuickViewLinks = Em.View.extend({
var links = Em.get(quickLinksConfig, 'links');
links.forEach(function(link){
var linkRemoved = Em.get(link, 'removed');
- var template = Em.get(link, 'template');
- if (template && !linkRemoved) {
+ var url = Em.get(link, 'url');
+ if (url && !linkRemoved) {
var port;
var hostNameRegExp = new RegExp('([\\w\\W]*):\\d+');
if (serviceName === 'HDFS') {
[10/24] ambari git commit: AMBARI-14859. Ranger - Keep all temporary
files in the tmp directory defined in the Agent's configuration(gautam)
Posted by nc...@apache.org.
AMBARI-14859. Ranger - Keep all temporary files in the tmp directory defined in the Agent's configuration(gautam)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/472d62a7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/472d62a7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/472d62a7
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 472d62a7b3c0383ad4d95fe659ac87b4a8da3a1f
Parents: 12faf8c
Author: Gautam Borad <ga...@apache.org>
Authored: Tue Jan 12 18:56:35 2016 +0530
Committer: Gautam Borad <ga...@apache.org>
Committed: Tue Feb 9 15:30:01 2016 +0530
----------------------------------------------------------------------
.../RANGER/0.4.0/package/scripts/params.py | 2 +
.../0.4.0/package/scripts/ranger_admin.py | 50 +++++++++++---------
.../0.4.0/package/scripts/service_check.py | 12 ++---
.../0.4.0/package/scripts/status_params.py | 27 +++++++++++
4 files changed, 61 insertions(+), 30 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/472d62a7/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
index b6e5ee9..6b6bf28 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py
@@ -45,6 +45,8 @@ host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+upgrade_marker_file = format("{tmp_dir}/rangeradmin_ru.inprogress")
+
xml_configurations_supported = config['configurations']['ranger-env']['xml_configurations_supported']
create_db_dbuser = config['configurations']['ranger-env']['create_db_dbuser']
http://git-wip-us.apache.org/repos/asf/ambari/blob/472d62a7/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
index a0007b3..5bcf6b9 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
@@ -29,8 +29,6 @@ import os, errno
class RangerAdmin(Script):
- upgrade_marker_file = '/tmp/rangeradmin_ru.inprogress'
-
def get_stack_to_component(self):
return {"HDP": "ranger-admin"}
@@ -67,11 +65,14 @@ class RangerAdmin(Script):
setup_ranger_db(upgrade_type=upgrade_type)
setup_java_patch(upgrade_type=upgrade_type)
- self.set_ru_rangeradmin_in_progress()
+ self.set_ru_rangeradmin_in_progress(params.upgrade_marker_file)
def post_upgrade_restart(self,env, upgrade_type=None):
- if os.path.isfile(RangerAdmin.upgrade_marker_file):
- os.remove(RangerAdmin.upgrade_marker_file)
+ import params
+ env.set_params(params)
+
+ if os.path.isfile(params.upgrade_marker_file):
+ os.remove(params.upgrade_marker_file)
def start(self, env, upgrade_type=None):
import params
@@ -81,15 +82,18 @@ class RangerAdmin(Script):
def status(self, env):
+ import status_params
+
+ env.set_params(status_params)
cmd = 'ps -ef | grep proc_rangeradmin | grep -v grep'
code, output = shell.call(cmd, timeout=20)
if code != 0:
- if self.is_ru_rangeradmin_in_progress():
- Logger.info('Ranger admin process not running - skipping as stack upgrade is in progress')
+ if self.is_ru_rangeradmin_in_progress(status_params.upgrade_marker_file):
+ Logger.info('Ranger admin process not running - skipping as stack upgrade is in progress')
else:
- Logger.debug('Ranger admin process not running')
- raise ComponentIsNotRunning()
+ Logger.debug('Ranger admin process not running')
+ raise ComponentIsNotRunning()
pass
def configure(self, env):
@@ -102,23 +106,23 @@ class RangerAdmin(Script):
ranger('ranger_admin')
- def set_ru_rangeradmin_in_progress(self):
- config_dir = os.path.dirname(RangerAdmin.upgrade_marker_file)
+ def set_ru_rangeradmin_in_progress(self, upgrade_marker_file):
+ config_dir = os.path.dirname(upgrade_marker_file)
try:
- msg = "Starting Upgrade"
- if (not os.path.exists(config_dir)):
- os.makedirs(config_dir)
- ofp = open(RangerAdmin.upgrade_marker_file, 'w')
- ofp.write(msg)
- ofp.close()
+ msg = "Starting Upgrade"
+ if (not os.path.exists(config_dir)):
+ os.makedirs(config_dir)
+ ofp = open(upgrade_marker_file, 'w')
+ ofp.write(msg)
+ ofp.close()
except OSError as exc:
- if exc.errno == errno.EEXIST and os.path.isdir(config_dir):
- pass
- else:
- raise
+ if exc.errno == errno.EEXIST and os.path.isdir(config_dir):
+ pass
+ else:
+ raise
- def is_ru_rangeradmin_in_progress(self):
- return os.path.isfile(RangerAdmin.upgrade_marker_file)
+ def is_ru_rangeradmin_in_progress(self, upgrade_marker_file):
+ return os.path.isfile(upgrade_marker_file)
if __name__ == "__main__":
RangerAdmin().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/472d62a7/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/service_check.py
index 699e3c4..fb6af95 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/service_check.py
@@ -27,16 +27,14 @@ import os
class RangerServiceCheck(Script):
- upgrade_marker_file = '/tmp/rangeradmin_ru.inprogress'
-
def service_check(self, env):
import params
env.set_params(params)
- self.check_ranger_admin_service(params.ranger_external_url)
+ self.check_ranger_admin_service(params.ranger_external_url, params.upgrade_marker_file)
- def check_ranger_admin_service(self, ranger_external_url):
- if (self.is_ru_rangeradmin_in_progress()):
+ def check_ranger_admin_service(self, ranger_external_url, upgrade_marker_file):
+ if (self.is_ru_rangeradmin_in_progress(upgrade_marker_file)):
Logger.info('Ranger admin process not running - skipping as stack upgrade is in progress')
else:
Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k {ranger_external_url}/login.jsp | grep 200"),
@@ -44,8 +42,8 @@ class RangerServiceCheck(Script):
try_sleep=3,
logoutput=True)
- def is_ru_rangeradmin_in_progress(self):
- return os.path.isfile(RangerServiceCheck.upgrade_marker_file)
+ def is_ru_rangeradmin_in_progress(self, upgrade_marker_file):
+ return os.path.isfile(upgrade_marker_file)
if __name__ == "__main__":
RangerServiceCheck().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/472d62a7/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/status_params.py
new file mode 100644
index 0000000..b932f88
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/status_params.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script import Script
+from resource_management.libraries.functions.format import format
+
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+upgrade_marker_file = format("{tmp_dir}/rangeradmin_ru.inprogress")
\ No newline at end of file
[19/24] ambari git commit: AMBARI-14962 : Auto configure AMS UI to
use AMS Datasource (avijayan)
Posted by nc...@apache.org.
AMBARI-14962 : Auto configure AMS UI to use AMS Datasource (avijayan)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/14396f2b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/14396f2b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/14396f2b
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 14396f2b74115744d6f4c93b97a9911cd1e58ff1
Parents: 38a5225
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Mon Feb 8 13:22:15 2016 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Feb 9 08:18:51 2016 -0800
----------------------------------------------------------------------
.../0.1.0/package/scripts/metrics_grafana.py | 2 +
.../package/scripts/metrics_grafana_util.py | 131 +++++++++++++++++++
.../metrics_grafana_datasource.json.j2 | 15 +++
.../AMBARI_METRICS/test_metrics_grafana.py | 19 ++-
4 files changed, 163 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/14396f2b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py
index f876036..d96309c 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana.py
@@ -22,6 +22,7 @@ from resource_management import Script, Execute
from resource_management.libraries.functions import format
from status import check_service_status
from ams import ams
+from metrics_grafana_util import create_ams_datasource
class AmsGrafana(Script):
def install(self, env):
@@ -46,6 +47,7 @@ class AmsGrafana(Script):
Execute(start_cmd,
user=params.ams_user
)
+ create_ams_datasource()
def stop(self, env):
import params
http://git-wip-us.apache.org/repos/asf/ambari/blob/14396f2b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
new file mode 100644
index 0000000..c4a91e1
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+from resource_management.core.logger import Logger
+from resource_management.core.base import Fail
+from resource_management import Template
+
+import httplib
+import time
+import socket
+import json
+
+def create_ams_datasource():
+
+ import params
+ GRAFANA_CONNECT_TRIES = 5
+ GRAFANA_CONNECT_TIMEOUT = 15
+ GRAFANA_URL = "/api/datasources"
+ METRICS_GRAFANA_DATASOURCE_NAME = "AMBARI_METRICS"
+
+ headers = {"Content-type": "application/json"}
+
+ Logger.info("Checking if AMS Grafana datasource already exists")
+ Logger.info("Connecting (GET) to %s:%s%s" % (params.hostname,
+ params.ams_grafana_port,
+ GRAFANA_URL))
+
+ conn = httplib.HTTPConnection(params.hostname,
+ int(params.ams_grafana_port))
+
+ conn.request("GET", GRAFANA_URL)
+ response = conn.getresponse()
+ Logger.info("Http response: %s %s" % (response.status, response.reason))
+
+ if(response.status == 200):
+ datasources = response.read()
+ datasources_json = json.loads(datasources)
+ for i in xrange(0, len(datasources_json)):
+ datasource_name = datasources_json[i]["name"]
+ if(datasource_name == METRICS_GRAFANA_DATASOURCE_NAME):
+
+ Logger.info("Ambari Metrics Grafana datasource already present. Checking Metrics Collector URL")
+ datasource_url = datasources_json[i]["url"]
+
+ if datasource_url == (params.metric_collector_host + ":" + params.metric_collector_port
+ + "/ws/v1/timeline/metrics") :
+ Logger.info("Metrics Collector URL validation succeeded. Skipping datasource creation")
+ GRAFANA_CONNECT_TRIES = 0 # No need to create datasource again
+
+ else: # Metrics datasource present, but collector host is wrong.
+
+ Logger.info("Metrics Collector URL validation failed.")
+ datasource_id = datasources_json[i]["id"]
+ Logger.info("Deleting obselete Metrics datasource.")
+ conn = httplib.HTTPConnection(params.hostname, int(params.ams_grafana_port))
+ conn.request("DELETE", GRAFANA_URL + "/" + str(datasource_id))
+ response = conn.getresponse()
+ Logger.info("Http response: %s %s" % (response.status, response.reason))
+
+ break
+ else:
+ Logger.info("Error checking for Ambari Metrics Grafana datasource. Will attempt to create.")
+
+ if GRAFANA_CONNECT_TRIES > 0:
+ Logger.info("Attempting to create Ambari Metrics Grafana datasource")
+
+ for i in xrange(0, GRAFANA_CONNECT_TRIES):
+ try:
+ ams_datasource_json = Template('metrics_grafana_datasource.json.j2',
+ ams_datasource_name=METRICS_GRAFANA_DATASOURCE_NAME,
+ ams_collector_host=params.metric_collector_host,
+ ams_collector_port=params.metric_collector_port).get_content()
+
+ Logger.info("Generated datasource:\n%s" % ams_datasource_json)
+
+ Logger.info("Connecting (POST) to %s:%s%s" % (params.hostname,
+ params.ams_grafana_port,
+ GRAFANA_URL))
+ conn = httplib.HTTPConnection(params.hostname,
+ int(params.ams_grafana_port))
+ conn.request("POST", GRAFANA_URL, ams_datasource_json, headers)
+
+ response = conn.getresponse()
+ Logger.info("Http response: %s %s" % (response.status, response.reason))
+ except (httplib.HTTPException, socket.error) as ex:
+ if i < GRAFANA_CONNECT_TRIES - 1:
+ time.sleep(GRAFANA_CONNECT_TIMEOUT)
+ Logger.info("Connection to Grafana failed. Next retry in %s seconds."
+ % (GRAFANA_CONNECT_TIMEOUT))
+ continue
+ else:
+ raise Fail("Ambari Metrics Grafana datasource not created")
+
+ data = response.read()
+ Logger.info("Http data: %s" % data)
+ conn.close()
+
+ if response.status == 200:
+ Logger.info("Ambari Metrics Grafana data source created.")
+ break
+ elif response.status == 500:
+ Logger.info("Ambari Metrics Grafana data source creation failed. Not retrying.")
+ raise Fail("Ambari Metrics Grafana data source creation failed. POST request status: %s %s \n%s" %
+ (response.status, response.reason, data))
+ else:
+ Logger.info("Ambari Metrics Grafana data source creation failed.")
+ if i < GRAFANA_CONNECT_TRIES - 1:
+ time.sleep(GRAFANA_CONNECT_TIMEOUT)
+ Logger.info("Next retry in %s seconds."
+ % (GRAFANA_CONNECT_TIMEOUT))
+ else:
+ raise Fail("Ambari Metrics Grafana data source creation failed. POST request status: %s %s \n%s" %
+ (response.status, response.reason, data))
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/14396f2b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2
new file mode 100644
index 0000000..3edc6fb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2
@@ -0,0 +1,15 @@
+{
+ "name": "{{ams_datasource_name}}",
+ "type": "ambarimetrics",
+ "access": "proxy",
+ "url": "{{ams_collector_host}}:{{ams_collector_port}}/ws/v1/timeline/metrics",
+ "password": "",
+ "user": "",
+ "database": "",
+ "basicAuth": false,
+ "basicAuthUser": "",
+ "basicAuthPassword": "",
+ "withCredentials": false,
+ "isDefault": false,
+ "jsonData": {}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/14396f2b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_grafana.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_grafana.py b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_grafana.py
index 5bc1412..755bb4f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_grafana.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_grafana.py
@@ -18,17 +18,27 @@ See the License for the specific language governing permissions and
limitations under the License.
'''
-from mock.mock import MagicMock, patch
+from mock.mock import MagicMock, patch, call
from stacks.utils.RMFTestCase import *
+import os, sys
@patch("os.path.exists", new = MagicMock(return_value=True))
@patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
class TestMetricsGrafana(RMFTestCase):
- COMMON_SERVICES_PACKAGE_DIR = "AMBARI_METRICS/0.1.0/package"
+ COMMON_SERVICES_PACKAGE_DIR = "AMBARI_METRICS/0.1.0/package/scripts"
STACK_VERSION = "2.0.6"
- def test_start(self):
- self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/metrics_grafana.py",
+ file_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(file_path)))))
+ file_path = os.path.join(file_path, "main", "resources", "common-services", COMMON_SERVICES_PACKAGE_DIR)
+
+ sys.path.append(file_path)
+ global metrics_grafana_util
+ import metrics_grafana_util
+
+ @patch("metrics_grafana_util.create_ams_datasource")
+ def test_start(self, create_ams_datasource_mock):
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/metrics_grafana.py",
classname = "AmsGrafana",
command = "start",
config_file="default.json",
@@ -43,6 +53,7 @@ class TestMetricsGrafana(RMFTestCase):
self.assertResourceCalled('Execute', '/usr/sbin/ambari-metrics-grafana start',
user = 'ams'
)
+ create_ams_datasource_mock.assertCalled()
self.assertNoMoreResources()
def assert_configure(self):
[08/24] ambari git commit: AMBARI-14963. Cannot login due to a JS
error (alexantonenko)
Posted by nc...@apache.org.
AMBARI-14963. Cannot login due to a JS error (alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d9faf741
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d9faf741
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d9faf741
Branch: refs/heads/branch-dev-patch-upgrade
Commit: d9faf7412c5d1968ba74d1eab3487b7e374ea002
Parents: 5352638
Author: Alex Antonenko <hi...@gmail.com>
Authored: Tue Feb 9 01:41:32 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Tue Feb 9 02:41:31 2016 +0200
----------------------------------------------------------------------
ambari-web/app/router.js | 11 ++++++++++-
1 file changed, 10 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/d9faf741/ambari-web/app/router.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/router.js b/ambari-web/app/router.js
index 809979e..720ac0b 100644
--- a/ambari-web/app/router.js
+++ b/ambari-web/app/router.js
@@ -349,9 +349,18 @@ App.Router = Em.Router.extend({
/**
* success callback of router.login.message
* @param {object} data
+ * @param {object} opt
+ * @param {object} params
*/
showLoginMessage: function (data, opt, params){
- var response = JSON.parse(data.Settings.content.replace(/\n/g, "\\n")),
+ try {
+ var response = JSON.parse(data.Settings.content.replace(/\n/g, "\\n"))
+ } catch (e) {
+ this.setClusterData(data, opt, params);
+ return false;
+ }
+
+ var
text = response.text ? response.text : "",
buttonText = response.button ? response.button : Em.I18n.t('ok'),
status = response.status && response.status == "true" ? true : false,
[23/24] ambari git commit: AMBARI-14990. 'ambari-server setup -s'
should not pickup $JAVA_HOME as jdk location (aonishuk)
Posted by nc...@apache.org.
AMBARI-14990. 'ambari-server setup -s' should not pickup $JAVA_HOME as jdk location (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/46bbbf9c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/46bbbf9c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/46bbbf9c
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 46bbbf9c2c72b52f107d4d929ab3dc6dc05577d4
Parents: feb50e3
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Feb 10 13:20:54 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Feb 10 13:20:54 2016 +0200
----------------------------------------------------------------------
.../src/main/python/ambari_server/serverSetup.py | 17 -----------------
1 file changed, 17 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/46bbbf9c/ambari-server/src/main/python/ambari_server/serverSetup.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverSetup.py b/ambari-server/src/main/python/ambari_server/serverSetup.py
index 511da27..c370257 100644
--- a/ambari-server/src/main/python/ambari_server/serverSetup.py
+++ b/ambari-server/src/main/python/ambari_server/serverSetup.py
@@ -432,23 +432,6 @@ class JDKSetup(object):
else:
progress_func = download_progress
- if get_silent():
- if not java_home_var:
- #No java_home_var set, detect if java is already installed
- if os.environ.has_key(JAVA_HOME):
- args.java_home = os.environ[JAVA_HOME]
-
- properties.process_pair(JAVA_HOME_PROPERTY, args.java_home)
- properties.removeOldProp(JDK_NAME_PROPERTY)
- properties.removeOldProp(JCE_NAME_PROPERTY)
-
- self._ensure_java_home_env_var_is_set(args.java_home)
- self.jdk_index = self.custom_jdk_number
- return
- else:
- # For now, changing the existing JDK to make sure we use a supported one
- pass
-
if java_home_var:
change_jdk = get_YN_input("Do you want to change Oracle JDK [y/n] (n)? ", False)
if not change_jdk:
[20/24] ambari git commit: AMBARI-14962 : Auto configure AMS UI to
use AMS Datasource - Commit 2 (avijayan)
Posted by nc...@apache.org.
AMBARI-14962 : Auto configure AMS UI to use AMS Datasource - Commit 2 (avijayan)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/37122a6d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/37122a6d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/37122a6d
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 37122a6d3aa7f5b5cb48c85a420be1c389010951
Parents: 14396f2
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Feb 9 11:09:26 2016 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Feb 9 11:09:26 2016 -0800
----------------------------------------------------------------------
.../templates/metrics_grafana_datasource.json.j2 | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/37122a6d/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2
index 3edc6fb..1b03a2d 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2
@@ -1,3 +1,21 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
{
"name": "{{ams_datasource_name}}",
"type": "ambarimetrics",
[15/24] ambari git commit: AMBARI-14961. Ambari overwrites
auth_to_local rules in core-site.xml (dlysnichenko)
Posted by nc...@apache.org.
AMBARI-14961. Ambari overwrites auth_to_local rules in core-site.xml (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/42945001
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/42945001
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/42945001
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 4294500105a3e7f12be3f14801dd319a6b95f489
Parents: dfff43d
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Feb 9 17:02:44 2016 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Feb 9 17:02:44 2016 +0200
----------------------------------------------------------------------
.../ambari/server/controller/KerberosHelperImpl.java | 8 +++++++-
.../KERBEROS/1.10.3-10/configuration/kerberos-env.xml | 14 ++++++++++++++
ambari-web/app/data/HDP2/site_properties.js | 7 +++++++
3 files changed, 28 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/42945001/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index fe1ba46..be6edc9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -651,7 +651,13 @@ public class KerberosHelperImpl implements KerberosHelper {
Map<String, Map<String, String>> kerberosConfigurations)
throws AmbariException {
- if (kerberosDescriptor != null) {
+ boolean processAuthToLocalRules = true;
+ Map<String, String> kerberosEnvProperties = existingConfigurations.get("kerberos-env");
+ if(kerberosEnvProperties.containsKey("manage_auth_to_local")) {
+ processAuthToLocalRules = Boolean.valueOf(kerberosEnvProperties.get("manage_auth_to_local"));
+ }
+
+ if (kerberosDescriptor != null && processAuthToLocalRules) {
Set<String> authToLocalProperties;
Set<String> authToLocalPropertiesToSet = new HashSet<String>();
http://git-wip-us.apache.org/repos/asf/ambari/blob/42945001/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
index 25a5533..a03dea6 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
@@ -50,6 +50,20 @@
</property>
<property>
+ <name>manage_auth_to_local</name>
+ <description>
+ Indicates whether the hadoop auth_to_local rules should be managed by Ambari or managed manually.
+ </description>
+ <value>true</value>
+ <display-name>Manage Hadoop auth_to_local rules</display-name>
+ <value-attributes>
+ <visible>true</visible>
+ <overridable>false</overridable>
+ <type>boolean</type>
+ </value-attributes>
+ </property>
+
+ <property>
<name>install_packages</name>
<display-name>Install OS-specific Kerberos client package(s)</display-name>
<description>
http://git-wip-us.apache.org/repos/asf/ambari/blob/42945001/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index 385b1bf..3ea6c68 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -1491,6 +1491,13 @@ var hdp2properties = [
"index" : 13
},
{
+ "name": "manage_auth_to_local",
+ "serviceName": "KERBEROS",
+ "filename": "kerberos-env.xml",
+ "category": "Advanced kerberos-env",
+ "index" : 14
+ },
+ {
"name": "admin_server_host",
"serviceName": "KERBEROS",
"filename": "kerberos-env.xml",
[12/24] ambari git commit: AMBARI-14969. autobootstrap failed on
CentOS 7 (aonishuk)
Posted by nc...@apache.org.
AMBARI-14969. autobootstrap failed on CentOS 7 (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f25dd31a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f25dd31a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f25dd31a
Branch: refs/heads/branch-dev-patch-upgrade
Commit: f25dd31aa998b257a7bb9d1efc2aca9144b5f607
Parents: f734aec
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Feb 9 12:38:01 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Feb 9 12:38:01 2016 +0200
----------------------------------------------------------------------
ambari-server/src/main/python/bootstrap.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f25dd31a/ambari-server/src/main/python/bootstrap.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/bootstrap.py b/ambari-server/src/main/python/bootstrap.py
index 75bb26a..ad428af 100755
--- a/ambari-server/src/main/python/bootstrap.py
+++ b/ambari-server/src/main/python/bootstrap.py
@@ -18,12 +18,14 @@ See the License for the specific language governing permissions and
limitations under the License.
'''
+import sys
+sys.path.append("/usr/lib/python2.6/site-packages/") # this file can be run with python2.7 that why we need this
+
# On Linux, the bootstrap process is supposed to run on hosts that may have installed Python 2.4 and above (CentOS 5).
# Hence, the whole bootstrap code needs to comply with Python 2.4 instead of Python 2.6. Most notably, @-decorators and
# {}-format() are to be avoided.
import time
-import sys
import logging
import pprint
import os
[13/24] ambari git commit: AMBARI-14967. Incorrect behavior of period
combobox/metrics on Dashboard page after resetting all widgets to default
(alexantonenko)
Posted by nc...@apache.org.
AMBARI-14967. Incorrect behavior of period combobox/metrics on Dashboard page after resetting all widgets to default (alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cb3b3ed4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cb3b3ed4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cb3b3ed4
Branch: refs/heads/branch-dev-patch-upgrade
Commit: cb3b3ed444812fa34baf05222c21360ec7268c6a
Parents: f25dd31
Author: Alex Antonenko <hi...@gmail.com>
Authored: Tue Feb 9 11:57:41 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Tue Feb 9 12:39:30 2016 +0200
----------------------------------------------------------------------
ambari-web/app/views/main/dashboard/widgets.js | 5 +++
.../test/views/main/dashboard/widgets_test.js | 42 +++++++++++++++++---
2 files changed, 41 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/cb3b3ed4/ambari-web/app/views/main/dashboard/widgets.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets.js b/ambari-web/app/views/main/dashboard/widgets.js
index 95afba7..a2fb281 100644
--- a/ambari-web/app/views/main/dashboard/widgets.js
+++ b/ambari-web/app/views/main/dashboard/widgets.js
@@ -504,6 +504,11 @@ App.MainDashboardWidgetsView = Em.View.extend(App.UserPref, App.LocalStorage, Ap
self.postUserPref(self.get('persistKey'), self.get('initPrefObject'));
self.setDBProperty(self.get('persistKey'), self.get('initPrefObject'));
}
+ self.setProperties({
+ currentTimeRangeIndex: 0,
+ customStartTime: null,
+ customEndTime: null
+ });
self.translateToReal(self.get('initPrefObject'));
});
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/cb3b3ed4/ambari-web/test/views/main/dashboard/widgets_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/dashboard/widgets_test.js b/ambari-web/test/views/main/dashboard/widgets_test.js
index ca0837a..49a06a4 100644
--- a/ambari-web/test/views/main/dashboard/widgets_test.js
+++ b/ambari-web/test/views/main/dashboard/widgets_test.js
@@ -449,16 +449,46 @@ describe('App.MainDashboardWidgetsView', function () {
});
describe("#resetAllWidgets()", function () {
- before(function () {
- sinon.stub(App, 'showConfirmationPopup', Em.K);
+
+ beforeEach(function () {
+ sinon.stub(App, 'showConfirmationPopup', Em.clb);
+ sinon.stub(view, 'postUserPref', Em.K);
+ sinon.stub(view, 'setDBProperty', Em.K);
+ sinon.stub(view, 'translateToReal', Em.K);
+ view.setProperties({
+ currentTimeRangeIndex: 1,
+ customStartTime: 1000,
+ customEndTime: 2000
+ });
+ view.resetAllWidgets();
});
- after(function () {
+
+ afterEach(function () {
App.showConfirmationPopup.restore();
+ view.postUserPref.restore();
+ view.setDBProperty.restore();
+ view.translateToReal.restore();
});
- it("showConfirmationPopup is called once", function () {
- view.resetAllWidgets();
- expect(App.showConfirmationPopup.calledOnce).to.be.true;
+
+ it('persist reset', function () {
+ expect(view.postUserPref.calledOnce).to.be.true;
+ });
+ it('local storage reset', function () {
+ expect(view.setDBProperty.calledOnce).to.be.true;
+ });
+ it('time range reset', function () {
+ expect(view.get('currentTimeRangeIndex')).to.equal(0);
});
+ it('custom start time reset', function () {
+ expect(view.get('customStartTime')).to.be.null;
+ });
+ it('custom end time reset', function () {
+ expect(view.get('customEndTime')).to.be.null;
+ });
+ it('default settings application', function () {
+ expect(view.translateToReal.calledOnce).to.be.true;
+ });
+
});
describe('#checkServicesChange', function () {
[11/24] ambari git commit: AMBARI-14957 Paging not working properly
when filtering with alerts and hostname. (atkach)
Posted by nc...@apache.org.
AMBARI-14957 Paging not working properly when filtering with alerts and hostname. (atkach)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f734aec9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f734aec9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f734aec9
Branch: refs/heads/branch-dev-patch-upgrade
Commit: f734aec99bacb9ec6115574d890b74cc1d9477f5
Parents: 472d62a
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Mon Feb 8 16:54:55 2016 +0200
Committer: Andrii Tkach <at...@hortonworks.com>
Committed: Tue Feb 9 12:04:34 2016 +0200
----------------------------------------------------------------------
ambari-web/app/controllers/global/update_controller.js | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f734aec9/ambari-web/app/controllers/global/update_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/global/update_controller.js b/ambari-web/app/controllers/global/update_controller.js
index 0168398..04e7427 100644
--- a/ambari-web/app/controllers/global/update_controller.js
+++ b/ambari-web/app/controllers/global/update_controller.js
@@ -118,6 +118,8 @@ App.UpdateController = Em.Controller.extend({
var params = '';
queryParams.forEach(function (param) {
+ var customKey = param.key;
+
switch (param.type) {
case 'EQUAL':
params += param.key + '=' + param.value;
@@ -138,10 +140,10 @@ App.UpdateController = Em.Controller.extend({
params += 'sortBy=' + param.key + '.' + param.value;
break;
case 'CUSTOM':
- param.value.forEach(function(item, index){
- param.key = param.key.replace('{' + index + '}', item);
+ param.value.forEach(function (item, index) {
+ customKey = customKey.replace('{' + index + '}', item);
}, this);
- params += param.key;
+ params += customKey;
break;
}
params += '&';
[17/24] ambari git commit: AMBARI-14971 UI - lag in Create Versions
page doesn't show confirmation. (ababiichuk)
Posted by nc...@apache.org.
AMBARI-14971 UI - lag in Create Versions page doesn't show confirmation. (ababiichuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/400d4954
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/400d4954
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/400d4954
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 400d49548755a8c50760dd1bfcba81dacaba79f6
Parents: ef1b98b
Author: ababiichuk <ab...@hortonworks.com>
Authored: Tue Feb 9 15:37:20 2016 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Tue Feb 9 17:46:47 2016 +0200
----------------------------------------------------------------------
.../scripts/controllers/stackVersions/StackVersionsCreateCtrl.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/400d4954/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index fc4bee9..532e5f4 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -60,7 +60,7 @@ angular.module('ambariAdminConsole')
.success(function () {
var versionName = $scope.upgradeStack.selected.stack_version + '.' + $scope.repoSubversion;
var stackName = $scope.upgradeStack.selected.stack_name;
- Alert.success($t('versions.alerts.versionCreated'), {stackName: stackName, versionName: versionName});
+ Alert.success($t('versions.alerts.versionCreated', {stackName: stackName, versionName: versionName}));
$location.path('/stackVersions');
})
.error(function (data) {
[16/24] ambari git commit: AMBARI-14973. ambari-agent upstart script
restart triggers the restart of hbase specific JVM processes (aonishuk)
Posted by nc...@apache.org.
AMBARI-14973. ambari-agent upstart script restart triggers the restart of hbase specific JVM processes (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ef1b98b3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ef1b98b3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ef1b98b3
Branch: refs/heads/branch-dev-patch-upgrade
Commit: ef1b98b3c77a5b4ebafbd5cc9c720ad9f13b63e8
Parents: 4294500
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Feb 9 17:40:11 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Feb 9 17:40:11 2016 +0200
----------------------------------------------------------------------
ambari-agent/src/main/python/ambari_agent/PythonExecutor.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ef1b98b3/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
index 0d431bc..d75fc0c 100644
--- a/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
+++ b/ambari-agent/src/main/python/ambari_agent/PythonExecutor.py
@@ -166,6 +166,9 @@ class PythonExecutor(object):
else:
structured_out = {}
return out, error, structured_out
+
+ def preexec_fn(self):
+ os.setpgid(0, 0)
def launch_python_subprocess(self, command, tmpout, tmperr):
"""
@@ -181,7 +184,7 @@ class PythonExecutor(object):
return subprocess.Popen(command,
stdout=tmpout,
- stderr=tmperr, close_fds=close_fds, env=command_env)
+ stderr=tmperr, close_fds=close_fds, env=command_env, preexec_fn=self.preexec_fn)
def isSuccessfull(self, returncode):
return not self.python_process_has_been_killed and returncode == 0
[03/24] ambari git commit: AMBARI-14958. Alerts: Create new Alerts
Notification type for SNMP to handle Ambari MIB (onechiporenko)
Posted by nc...@apache.org.
AMBARI-14958. Alerts: Create new Alerts Notification type for SNMP to handle Ambari MIB (onechiporenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e139ef57
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e139ef57
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e139ef57
Branch: refs/heads/branch-dev-patch-upgrade
Commit: e139ef57ce0ec3d705489a36cb2d7cc78f6458c1
Parents: 5cb0fad
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Mon Feb 8 16:58:17 2016 +0200
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Mon Feb 8 18:52:14 2016 +0200
----------------------------------------------------------------------
.../manage_alert_notifications_controller.js | 74 +++++++-----
...anage_alert_notifications_controller_test.js | 121 +++++++++++--------
.../views/common/log_file_search_view_test.js | 3 +-
3 files changed, 116 insertions(+), 82 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e139ef57/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js b/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
index 79e524e..244b8f0 100644
--- a/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
+++ b/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
@@ -61,9 +61,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
value: '',
defaultValue: 'custom',
disabled: false,
- isAll: function () {
- return this.get('value') == 'all';
- }.property('value')
+ isAll: Em.computed.equal('value', 'all')
}),
method: {
label: Em.I18n.t('alerts.actions.manage_alert_notifications_popup.method'),
@@ -159,7 +157,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
* used in Type combobox
* @type {Array}
*/
- methods: ['EMAIL', 'SNMP'],
+ methods: ['EMAIL', 'SNMP', 'Custom SNMP'],
/**
* List of available value for Severity Filter
@@ -220,7 +218,8 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
'mail.smtp.from',
'mail.smtp.host',
'mail.smtp.port',
- 'mail.smtp.starttls.enable'
+ 'mail.smtp.starttls.enable',
+ 'ambari.dispatch-property.script'
],
validationMap: {
@@ -242,6 +241,16 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
validator: 'retypePasswordValidation'
}
],
+ 'Custom SNMP': [
+ {
+ errorKey: 'portError',
+ validator: 'portValidation'
+ },
+ {
+ errorKey: 'hostError',
+ validator: 'hostsValidation'
+ }
+ ],
SNMP: [
{
errorKey: 'portError',
@@ -322,24 +331,25 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
fillEditCreateInputs: function (addCopyToName) {
var inputFields = this.get('inputFields');
var selectedAlertNotification = this.get('selectedAlertNotification');
+ var props = selectedAlertNotification.get('properties');
inputFields.set('name.value', (addCopyToName ? 'Copy of ' : '') + selectedAlertNotification.get('name'));
inputFields.set('groups.value', selectedAlertNotification.get('groups').toArray());
- inputFields.set('email.value', selectedAlertNotification.get('properties')['ambari.dispatch.recipients'] ?
- selectedAlertNotification.get('properties')['ambari.dispatch.recipients'].join(', ') : '');
- inputFields.set('SMTPServer.value', selectedAlertNotification.get('properties')['mail.smtp.host']);
- inputFields.set('SMTPPort.value', selectedAlertNotification.get('properties')['mail.smtp.port']);
- inputFields.set('SMTPUseAuthentication.value', selectedAlertNotification.get('properties')['mail.smtp.auth'] !== "false");
- inputFields.set('SMTPUsername.value', selectedAlertNotification.get('properties')['ambari.dispatch.credential.username']);
- inputFields.set('SMTPPassword.value', selectedAlertNotification.get('properties')['ambari.dispatch.credential.password']);
- inputFields.set('retypeSMTPPassword.value', selectedAlertNotification.get('properties')['ambari.dispatch.credential.password']);
- inputFields.set('SMTPSTARTTLS.value', selectedAlertNotification.get('properties')['mail.smtp.starttls.enable'] !== "false");
- inputFields.set('emailFrom.value', selectedAlertNotification.get('properties')['mail.smtp.from']);
- inputFields.set('version.value', selectedAlertNotification.get('properties')['ambari.dispatch.snmp.version']);
- inputFields.set('OIDs.value', selectedAlertNotification.get('properties')['ambari.dispatch.snmp.oids.trap']);
- inputFields.set('community.value', selectedAlertNotification.get('properties')['ambari.dispatch.snmp.community']);
- inputFields.set('host.value', selectedAlertNotification.get('properties')['ambari.dispatch.recipients'] ?
- selectedAlertNotification.get('properties')['ambari.dispatch.recipients'].join(', ') : '');
- inputFields.set('port.value', selectedAlertNotification.get('properties')['ambari.dispatch.snmp.port']);
+ inputFields.set('email.value', props['ambari.dispatch.recipients'] ?
+ props['ambari.dispatch.recipients'].join(', ') : '');
+ inputFields.set('SMTPServer.value', props['mail.smtp.host']);
+ inputFields.set('SMTPPort.value', props['mail.smtp.port']);
+ inputFields.set('SMTPUseAuthentication.value', props['mail.smtp.auth'] !== "false");
+ inputFields.set('SMTPUsername.value', props['ambari.dispatch.credential.username']);
+ inputFields.set('SMTPPassword.value', props['ambari.dispatch.credential.password']);
+ inputFields.set('retypeSMTPPassword.value', props['ambari.dispatch.credential.password']);
+ inputFields.set('SMTPSTARTTLS.value', props['mail.smtp.starttls.enable'] !== "false");
+ inputFields.set('emailFrom.value', props['mail.smtp.from']);
+ inputFields.set('version.value', props['ambari.dispatch.snmp.version']);
+ inputFields.set('OIDs.value', props['ambari.dispatch.snmp.oids.trap']);
+ inputFields.set('community.value', props['ambari.dispatch.snmp.community']);
+ inputFields.set('host.value', props['ambari.dispatch.recipients'] ?
+ props['ambari.dispatch.recipients'].join(', ') : '');
+ inputFields.set('port.value', props['ambari.dispatch.snmp.port']);
inputFields.set('severityFilter.value', selectedAlertNotification.get('alertStates'));
inputFields.set('global.value', selectedAlertNotification.get('global'));
inputFields.set('allGroups.value', selectedAlertNotification.get('global') ? 'all' : 'custom');
@@ -348,14 +358,13 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
inputFields.set('description.value', selectedAlertNotification.get('description'));
inputFields.set('method.value', selectedAlertNotification.get('type'));
inputFields.get('customProperties').clear();
- var properties = selectedAlertNotification.get('properties');
var ignoredCustomProperties = this.get('ignoredCustomProperties');
- Em.keys(properties).forEach(function (k) {
+ Em.keys(props).forEach(function (k) {
if (ignoredCustomProperties.contains(k)) return;
inputFields.get('customProperties').pushObject({
name: k,
- value: properties[k],
- defaultValue: properties[k]
+ value: props[k],
+ defaultValue: props[k]
});
});
},
@@ -394,7 +403,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
validationMap = self.get('validationMap');
self.get('methods').forEach(function (method) {
var validations = validationMap[method];
- if (method == currentMethod) {
+ if (method === currentMethod) {
validations.mapProperty('validator').forEach(function (key) {
this.get(key).call(this);
}, this);
@@ -418,7 +427,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
if (!newName) {
this.set('nameError', true);
errorMessage = Em.I18n.t('alerts.actions.manage_alert_notifications_popup.error.name.empty');
- } else if (newName && newName != this.get('currentName') && self.get('alertNotifications').mapProperty('name').contains(newName)) {
+ } else if (newName && newName !== this.get('currentName') && self.get('alertNotifications').mapProperty('name').contains(newName)) {
this.set('nameError', true);
errorMessage = Em.I18n.t('alerts.actions.manage_alert_notifications_popup.error.name.existed');
} else {
@@ -475,7 +484,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
hostsValidation: function() {
var inputValue = this.get('controller.inputFields.host.value').trim(),
- hostError = false;;
+ hostError = false;
if (!this.get('isEmailMethodSelected')) {
var array = inputValue.split(',');
hostError = array.some(function(hostname) {
@@ -531,7 +540,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
* @method selectAllGroups
*/
selectAllGroups: function () {
- if (this.get('controller.inputFields.allGroups.value') == 'custom') {
+ if (this.get('controller.inputFields.allGroups.value') === 'custom') {
this.set('groupSelect.selection', this.get('groupSelect.content').slice());
}
},
@@ -541,7 +550,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
* @method clearAllGroups
*/
clearAllGroups: function () {
- if (this.get('controller.inputFields.allGroups.value') == 'custom') {
+ if (this.get('controller.inputFields.allGroups.value') === 'custom') {
this.set('groupSelect.selection', []);
}
},
@@ -645,6 +654,9 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
properties['ambari.dispatch.snmp.community'] = inputFields.get('community.value');
properties['ambari.dispatch.recipients'] = inputFields.get('host.value').replace(/\s/g, '').split(',');
properties['ambari.dispatch.snmp.port'] = inputFields.get('port.value');
+ if (inputFields.get('method.value') === 'SNMP') {
+ properties['ambari.dispatch-property.script'] = "org.apache.ambari.contrib.snmp.script";
+ }
}
inputFields.get('customProperties').forEach(function (customProperty) {
properties[customProperty.name] = customProperty.value;
@@ -659,7 +671,7 @@ App.ManageAlertNotificationsController = Em.Controller.extend({
properties: properties
}
};
- if (inputFields.get('allGroups.value') == 'custom') {
+ if (inputFields.get('allGroups.value') === 'custom') {
apiObject.AlertTarget.groups = inputFields.get('groups.value').mapProperty('id');
}
return apiObject;
http://git-wip-us.apache.org/repos/asf/ambari/blob/e139ef57/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js b/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js
index 19d0c1b..283c0af 100644
--- a/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js
+++ b/ambari-web/test/controllers/main/alerts/manage_alert_notifications_controller_test.js
@@ -699,56 +699,77 @@ describe('App.ManageAlertNotificationsController', function () {
describe("#formatNotificationAPIObject()", function () {
- var inputFields = Em.Object.create({
- name: {
- value: 'test_name'
- },
- groups: {
- value: [{id: 1}, {id: 2}, {id: 3}]
- },
- allGroups: {
- value: 'custom'
- },
- global: {
- value: false
- },
- method: {
- value: 'EMAIL'
- },
- email: {
- value: 'test1@test.test, test2@test.test,test3@test.test , test4@test.test'
- },
- severityFilter: {
- value: ['OK', 'CRITICAL']
- },
- SMTPServer: {
- value: 's1'
- },
- SMTPPort: {
- value: '25'
- },
- SMTPUseAuthentication: {
- value: "true"
- },
- SMTPUsername: {
- value: 'user'
- },
- SMTPPassword: {
- value: 'pass'
- },
- SMTPSTARTTLS: {
- value: "true"
- },
- emailFrom: {
- value: 'from'
- },
- description: {
- value: 'test_description'
- },
- customProperties: [
- {name: 'n1', value: 'v1'},
- {name: 'n2', value: 'v2'}
- ]
+ var inputFields;
+
+ beforeEach(function () {
+ inputFields = Em.Object.create({
+ name: {
+ value: 'test_name'
+ },
+ groups: {
+ value: [{id: 1}, {id: 2}, {id: 3}]
+ },
+ allGroups: {
+ value: 'custom'
+ },
+ global: {
+ value: false
+ },
+ method: {
+ value: 'EMAIL'
+ },
+ email: {
+ value: 'test1@test.test, test2@test.test,test3@test.test , test4@test.test'
+ },
+ severityFilter: {
+ value: ['OK', 'CRITICAL']
+ },
+ SMTPServer: {
+ value: 's1'
+ },
+ SMTPPort: {
+ value: '25'
+ },
+ SMTPUseAuthentication: {
+ value: "true"
+ },
+ SMTPUsername: {
+ value: 'user'
+ },
+ SMTPPassword: {
+ value: 'pass'
+ },
+ SMTPSTARTTLS: {
+ value: "true"
+ },
+ emailFrom: {
+ value: 'from'
+ },
+ description: {
+ value: 'test_description'
+ },
+ host: {
+ value: ''
+ },
+ customProperties: [
+ {name: 'n1', value: 'v1'},
+ {name: 'n2', value: 'v2'}
+ ]
+ });
+ });
+
+ it('should set property `ambari.dispatch-property.script` for SNMP type', function () {
+ Em.set(inputFields, 'method.value', 'SNMP');
+ controller.set('inputFields', inputFields);
+ var result = controller.formatNotificationAPIObject();
+ expect(result.AlertTarget.properties['ambari.dispatch-property.script']).to.be.equal('org.apache.ambari.contrib.snmp.script');
+ });
+
+ it('should not set property `ambari.dispatch-property.script` for EMAIL type', function () {
+ Em.set(inputFields, 'method.value', 'EMAIL');
+ controller.set('inputFields', inputFields);
+ var result = controller.formatNotificationAPIObject();
+ expect(result.AlertTarget.properties).to.not.have.property('ambari.dispatch-property.script');
});
it("should create object with properties from inputFields values", function () {
http://git-wip-us.apache.org/repos/asf/ambari/blob/e139ef57/ambari-web/test/views/common/log_file_search_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/log_file_search_view_test.js b/ambari-web/test/views/common/log_file_search_view_test.js
index ca208b3..a5f940e 100644
--- a/ambari-web/test/views/common/log_file_search_view_test.js
+++ b/ambari-web/test/views/common/log_file_search_view_test.js
@@ -32,7 +32,8 @@ describe('App.LogFileSearchView', function() {
isIncluded: !!isIncluded
});
};
- var cases = [
+
+ [
{
viewContent: {
keywordsFilterValue: 'some_keyword'
[21/24] ambari git commit: AMBARI-14965: Ambari server lists service
even though service creation fails (Ajit Kumar via jluniya)
Posted by nc...@apache.org.
AMBARI-14965: Ambari server lists service even though service creation fails (Ajit Kumar via jluniya)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/73fbe14c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/73fbe14c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/73fbe14c
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 73fbe14c2a14619a5023ea0698cda72858c05fbe
Parents: 37122a6
Author: Jayush Luniya <jl...@hortonworks.com>
Authored: Tue Feb 9 15:08:42 2016 -0800
Committer: Jayush Luniya <jl...@hortonworks.com>
Committed: Tue Feb 9 15:08:42 2016 -0800
----------------------------------------------------------------------
.../internal/ServiceResourceProvider.java | 197 +++++++++----------
1 file changed, 88 insertions(+), 109 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/73fbe14c/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index a2aca70..ed7659f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -64,6 +64,8 @@ import org.apache.ambari.server.state.ServiceFactory;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.State;
import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.Validate;
+
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -148,7 +150,7 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
ResourceAlreadyExistsException,
NoSuchParentResourceException {
- final Set<ServiceRequest> requests = new HashSet<ServiceRequest>();
+ final Set<ServiceRequest> requests = new HashSet<>();
for (Map<String, Object> propertyMap : request.getProperties()) {
requests.add(getRequest(propertyMap));
}
@@ -338,115 +340,11 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
LOG.warn("Received an empty requests set");
return;
}
-
- Clusters clusters = getManagementController().getClusters();
- AmbariMetaInfo ambariMetaInfo = getManagementController().getAmbariMetaInfo();
-
+ Clusters clusters = getManagementController().getClusters();
// do all validation checks
- Map<String, Set<String>> serviceNames = new HashMap<String, Set<String>>();
- Set<String> duplicates = new HashSet<String>();
- for (ServiceRequest request : requests) {
- if (request.getClusterName() == null
- || request.getClusterName().isEmpty()
- || request.getServiceName() == null
- || request.getServiceName().isEmpty()) {
- throw new IllegalArgumentException("Cluster name and service name"
- + " should be provided when creating a service");
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Received a createService request"
- + ", clusterName=" + request.getClusterName()
- + ", serviceName=" + request.getServiceName()
- + ", request=" + request);
- }
-
- if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(request.getClusterName()), RoleAuthorization.SERVICE_ADD_DELETE_SERVICES)) {
- throw new AuthorizationException("The user is not authorized to create services");
- }
-
- if (!serviceNames.containsKey(request.getClusterName())) {
- serviceNames.put(request.getClusterName(), new HashSet<String>());
- }
- if (serviceNames.get(request.getClusterName())
- .contains(request.getServiceName())) {
- // throw error later for dup
- duplicates.add(request.getServiceName());
- continue;
- }
- serviceNames.get(request.getClusterName()).add(request.getServiceName());
-
- if (request.getDesiredState() != null
- && !request.getDesiredState().isEmpty()) {
- State state = State.valueOf(request.getDesiredState());
- if (!state.isValidDesiredState()
- || state != State.INIT) {
- throw new IllegalArgumentException("Invalid desired state"
- + " only INIT state allowed during creation"
- + ", providedDesiredState=" + request.getDesiredState());
- }
- }
-
- Cluster cluster;
- try {
- cluster = clusters.getCluster(request.getClusterName());
- } catch (ClusterNotFoundException e) {
- throw new ParentObjectNotFoundException("Attempted to add a service to a cluster which doesn't exist", e);
- }
- try {
- Service s = cluster.getService(request.getServiceName());
- if (s != null) {
- // throw error later for dup
- duplicates.add(request.getServiceName());
- continue;
- }
- } catch (ServiceNotFoundException e) {
- // Expected
- }
-
- StackId stackId = cluster.getDesiredStackVersion();
- if (!ambariMetaInfo.isValidService(stackId.getStackName(),
- stackId.getStackVersion(), request.getServiceName())) {
- throw new IllegalArgumentException("Unsupported or invalid service"
- + " in stack"
- + ", clusterName=" + request.getClusterName()
- + ", serviceName=" + request.getServiceName()
- + ", stackInfo=" + stackId.getStackId());
- }
- }
-
- // ensure only a single cluster update
- if (serviceNames.size() != 1) {
- throw new IllegalArgumentException("Invalid arguments, updates allowed"
- + "on only one cluster at a time");
- }
-
- // Validate dups
- if (!duplicates.isEmpty()) {
- StringBuilder svcNames = new StringBuilder();
- boolean first = true;
- for (String svcName : duplicates) {
- if (!first) {
- svcNames.append(",");
- }
- first = false;
- svcNames.append(svcName);
- }
- String clusterName = requests.iterator().next().getClusterName();
- String msg;
- if (duplicates.size() == 1) {
- msg = "Attempted to create a service which already exists: "
- + ", clusterName=" + clusterName + " serviceName=" + svcNames.toString();
- } else {
- msg = "Attempted to create services which already exist: "
- + ", clusterName=" + clusterName + " serviceNames=" + svcNames.toString();
- }
- throw new DuplicateResourceException(msg);
- }
+ validateCreateRequests(requests, clusters);
ServiceFactory serviceFactory = getManagementController().getServiceFactory();
-
- // now to the real work
for (ServiceRequest request : requests) {
Cluster cluster = clusters.getCluster(request.getClusterName());
@@ -457,11 +355,10 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
s.setDesiredState(state);
s.setDesiredStackVersion(cluster.getDesiredStackVersion());
+ s.persist();
cluster.addService(s);
// Initialize service widgets
getManagementController().initializeWidgetsAndLayouts(cluster, s);
-
- s.persist();
}
}
@@ -974,4 +871,86 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
return serviceSpecificProperties;
}
+
+ private void validateCreateRequests(Set<ServiceRequest> requests, Clusters clusters)
+ throws AuthorizationException, AmbariException {
+
+ AmbariMetaInfo ambariMetaInfo = getManagementController().getAmbariMetaInfo();
+ Map<String, Set<String>> serviceNames = new HashMap<>();
+ Set<String> duplicates = new HashSet<>();
+ for (ServiceRequest request : requests) {
+ final String clusterName = request.getClusterName();
+ final String serviceName = request.getServiceName();
+ Validate.notEmpty(clusterName, "Cluster name should be provided when creating a service");
+ Validate.notEmpty(serviceName, "Service name should be provided when creating a service");
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Received a createService request"
+ + ", clusterName=" + clusterName + ", serviceName=" + serviceName + ", request=" + request);
+ }
+
+ if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(clusterName), RoleAuthorization.SERVICE_ADD_DELETE_SERVICES)) {
+ throw new AuthorizationException("The user is not authorized to create services");
+ }
+
+ if (!serviceNames.containsKey(clusterName)) {
+ serviceNames.put(clusterName, new HashSet<String>());
+ }
+
+ if (serviceNames.get(clusterName).contains(serviceName)) {
+ // throw error later for dup
+ duplicates.add(serviceName);
+ continue;
+ }
+ serviceNames.get(clusterName).add(serviceName);
+
+ if (StringUtils.isNotEmpty(request.getDesiredState())) {
+ State state = State.valueOf(request.getDesiredState());
+ if (!state.isValidDesiredState() || state != State.INIT) {
+ throw new IllegalArgumentException("Invalid desired state"
+ + " only INIT state allowed during creation"
+ + ", providedDesiredState=" + request.getDesiredState());
+ }
+ }
+
+ Cluster cluster;
+ try {
+ cluster = clusters.getCluster(clusterName);
+ } catch (ClusterNotFoundException e) {
+ throw new ParentObjectNotFoundException("Attempted to add a service to a cluster which doesn't exist", e);
+ }
+ try {
+ Service s = cluster.getService(serviceName);
+ if (s != null) {
+ // throw error later for dup
+ duplicates.add(serviceName);
+ continue;
+ }
+ } catch (ServiceNotFoundException e) {
+ // Expected
+ }
+
+ StackId stackId = cluster.getDesiredStackVersion();
+ if (!ambariMetaInfo.isValidService(stackId.getStackName(),
+ stackId.getStackVersion(), request.getServiceName())) {
+ throw new IllegalArgumentException("Unsupported or invalid service in stack, clusterName=" + clusterName
+ + ", serviceName=" + serviceName + ", stackInfo=" + stackId.getStackId());
+ }
+ }
+ // ensure only a single cluster update
+ if (serviceNames.size() != 1) {
+ throw new IllegalArgumentException("Invalid arguments, updates allowed"
+ + "on only one cluster at a time");
+ }
+
+ // Validate dups
+ if (!duplicates.isEmpty()) {
+ String clusterName = requests.iterator().next().getClusterName();
+ String msg = "Attempted to create a service which already exists: "
+ + ", clusterName=" + clusterName + " serviceName=" + StringUtils.join(duplicates, ",");
+
+ throw new DuplicateResourceException(msg);
+ }
+
+ }
}
[09/24] ambari git commit: AMBARI-14875. Ranger Smart configs : Need
to hide Audit DB properties if Audit to DB is off (gautam)
Posted by nc...@apache.org.
AMBARI-14875. Ranger Smart configs : Need to hide Audit DB properties if Audit to DB is off (gautam)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/12faf8cb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/12faf8cb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/12faf8cb
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 12faf8cbae92cfa7836e7508ec2df8f2bd41d286
Parents: d9faf74
Author: Gautam Borad <ga...@apache.org>
Authored: Tue Feb 9 14:04:56 2016 +0530
Committer: Gautam Borad <ga...@apache.org>
Committed: Tue Feb 9 15:27:36 2016 +0530
----------------------------------------------------------------------
.../services/RANGER/themes/theme_version_1.json | 20 +---------
.../services/RANGER/themes/theme_version_2.json | 40 +++++++++++++++++++-
2 files changed, 39 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/12faf8cb/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/themes/theme_version_1.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/themes/theme_version_1.json b/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/themes/theme_version_1.json
index cb5aa78..e6724cd 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/themes/theme_version_1.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/RANGER/themes/theme_version_1.json
@@ -525,25 +525,7 @@
},
{
"config": "admin-properties/audit_db_password",
- "subsection-name": "subsection-ranger-audit-db-row2-col2",
- "depends-on": [
- {
- "configs":[
- "ranger-env/xasecure.audit.destination.db"
- ],
- "if": "${ranger-env/xasecure.audit.destination.db}",
- "then": {
- "property_value_attributes": {
- "visible": true
- }
- },
- "else": {
- "property_value_attributes": {
- "visible": false
- }
- }
- }
- ]
+ "subsection-name": "subsection-ranger-audit-db-row2-col2"
}
]
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/12faf8cb/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json
index 2c469a9..cbd27e4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json
@@ -896,11 +896,47 @@
},
{
"config": "admin-properties/audit_db_user",
- "subsection-name": "subsection-ranger-audit-db-row2-col1"
+ "subsection-name": "subsection-ranger-audit-db-row2-col1",
+ "depends-on": [
+ {
+ "configs":[
+ "ranger-env/xasecure.audit.destination.db"
+ ],
+ "if": "${ranger-env/xasecure.audit.destination.db}",
+ "then": {
+ "property_value_attributes": {
+ "visible": true
+ }
+ },
+ "else": {
+ "property_value_attributes": {
+ "visible": false
+ }
+ }
+ }
+ ]
},
{
"config": "admin-properties/audit_db_name",
- "subsection-name": "subsection-ranger-audit-db-row2-col2"
+ "subsection-name": "subsection-ranger-audit-db-row2-col2",
+ "depends-on": [
+ {
+ "configs":[
+ "ranger-env/xasecure.audit.destination.db"
+ ],
+ "if": "${ranger-env/xasecure.audit.destination.db}",
+ "then": {
+ "property_value_attributes": {
+ "visible": true
+ }
+ },
+ "else": {
+ "property_value_attributes": {
+ "visible": false
+ }
+ }
+ }
+ ]
},
{
"config": "admin-properties/audit_db_password",
[05/24] ambari git commit: AMBARI-14846. Dependencies popup is
displayed even when all dependent services are selected (alexantonenko)
Posted by nc...@apache.org.
AMBARI-14846. Dependencies popup is displayed even when all dependent services are selected (alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f905a024
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f905a024
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f905a024
Branch: refs/heads/branch-dev-patch-upgrade
Commit: f905a0243ec34df9c74cc76551cecd53a8251523
Parents: a396ff0
Author: Alex Antonenko <hi...@gmail.com>
Authored: Mon Feb 8 21:29:39 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Mon Feb 8 21:29:47 2016 +0200
----------------------------------------------------------------------
.../app/controllers/wizard/step4_controller.js | 122 +++++++++++++++----
.../test/controllers/wizard/step4_test.js | 40 +++++-
2 files changed, 136 insertions(+), 26 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f905a024/ambari-web/app/controllers/wizard/step4_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step4_controller.js b/ambari-web/app/controllers/wizard/step4_controller.js
index 90479ac..a8ad7fc 100644
--- a/ambari-web/app/controllers/wizard/step4_controller.js
+++ b/ambari-web/app/controllers/wizard/step4_controller.js
@@ -86,13 +86,23 @@ App.WizardStep4Controller = Em.ArrayController.extend({
*/
ambariMetricsValidation: function (callback) {
var ambariMetricsService = this.findProperty('serviceName', 'AMBARI_METRICS');
- if (ambariMetricsService && !ambariMetricsService.get('isSelected')) {
- this.addValidationError({
- id: 'ambariMetricsCheck',
- type: 'WARNING',
- callback: this.ambariMetricsCheckPopup,
- callbackParams: [callback]
- });
+ if (ambariMetricsService) {
+ if(!ambariMetricsService.get('isSelected')) {
+ this.addValidationError({
+ id: 'ambariMetricsCheck',
+ type: 'WARNING',
+ callback: this.ambariMetricsCheckPopup,
+ callbackParams: [callback]
+ });
+ }
+ else {
+ //metrics is selected, remove the metrics error from errorObject array
+ var metricsError = this.get('errorStack').filterProperty('id',"ambariMetricsCheck");
+ if(metricsError)
+ {
+ this.get('errorStack').removeObject(metricsError[0]);
+ }
+ }
}
},
@@ -103,13 +113,23 @@ App.WizardStep4Controller = Em.ArrayController.extend({
*/
rangerValidation: function (callback) {
var rangerService = this.findProperty('serviceName', 'RANGER');
- if (rangerService && rangerService.get('isSelected') && !rangerService.get('isInstalled')) {
- this.addValidationError({
- id: 'rangerRequirements',
- type: 'WARNING',
- callback: this.rangerRequirementsPopup,
- callbackParams: [callback]
- });
+ if (rangerService && !rangerService.get('isInstalled')) {
+ if(rangerService.get('isSelected')) {
+ this.addValidationError({
+ id: 'rangerRequirements',
+ type: 'WARNING',
+ callback: this.rangerRequirementsPopup,
+ callbackParams: [callback]
+ });
+ }
+ else {
+ //Ranger is not selected, remove the Ranger error from errorObject array
+ var rangerError = this.get('errorStack').filterProperty('id',"rangerRequirements");
+ if(rangerError)
+ {
+ this.get('errorStack').removeObject(rangerError[0]);
+ }
+ }
}
},
@@ -120,14 +140,24 @@ App.WizardStep4Controller = Em.ArrayController.extend({
*/
sparkValidation: function (callback) {
var sparkService = this.findProperty('serviceName', 'SPARK');
- if (sparkService && sparkService.get('isSelected') && !sparkService.get('isInstalled') &&
+ if (sparkService && !sparkService.get('isInstalled') &&
App.get('currentStackName') == 'HDP' && App.get('currentStackVersionNumber') == '2.2') {
- this.addValidationError({
- id: 'sparkWarning',
- type: 'WARNING',
- callback: this.sparkWarningPopup,
- callbackParams: [callback]
- });
+ if(sparkService.get('isSelected')) {
+ this.addValidationError({
+ id: 'sparkWarning',
+ type: 'WARNING',
+ callback: this.sparkWarningPopup,
+ callbackParams: [callback]
+ });
+ }
+ else {
+ //Spark is not selected, remove the Spark error from errorObject array
+ var sparkError = this.get('errorStack').filterProperty('id',"sparkWarning");
+ if(sparkError)
+ {
+ this.get('errorStack').removeObject(sparkError[0]);
+ }
+ }
}
},
@@ -317,6 +347,15 @@ App.WizardStep4Controller = Em.ArrayController.extend({
callbackParams: [services, 'multipleDFS', primaryDfsDisplayName, callback]
});
}
+ else
+ {
+ //if multiple DFS are not selected, remove the related error from the error array
+ var fsError = this.get('errorStack').filterProperty('id',"multipleDFS");
+ if(fsError)
+ {
+ this.get('errorStack').removeObject(fsError[0]);
+ }
+ }
}
}
},
@@ -335,16 +374,34 @@ App.WizardStep4Controller = Em.ArrayController.extend({
if (!!requiredServices && requiredServices.length) {
requiredServices.forEach(function(_requiredService){
var requiredService = this.findProperty('serviceName', _requiredService);
- if (requiredService && requiredService.get('isSelected') === false) {
- if(missingDependencies.indexOf(_requiredService) == -1 ) {
- missingDependencies.push(_requiredService);
- missingDependenciesDisplayName.push(requiredService.get('displayNameOnSelectServicePage'));
+ if (requiredService) {
+ if(requiredService.get('isSelected') === false)
+ {
+ if(missingDependencies.indexOf(_requiredService) == -1 ) {
+ missingDependencies.push(_requiredService);
+ missingDependenciesDisplayName.push(requiredService.get('displayNameOnSelectServicePage'));
+ }
}
+ else
+ {
+ //required service is selected, remove the service error from errorObject array
+ var serviceName = requiredService.get('serviceName');
+ var serviceError = this.get('errorStack').filterProperty('id',"serviceCheck_"+serviceName);
+ if(serviceError)
+ {
+ this.get('errorStack').removeObject(serviceError[0]);
+ }
+ }
}
},this);
}
},this);
+ //create a copy of the errorStack, reset it
+ //and add the dependencies in the correct order
+ var errorStackCopy = this.get('errorStack');
+ this.set('errorStack', []);
+
if (missingDependencies.length > 0) {
for(var i = 0; i < missingDependencies.length; i++) {
this.addValidationError({
@@ -352,7 +409,22 @@ App.WizardStep4Controller = Em.ArrayController.extend({
callback: this.needToAddServicePopup,
callbackParams: [{serviceName: missingDependencies[i], selected: true}, 'serviceCheck', missingDependenciesDisplayName[i], callback]
});
+ }
+ }
+
+ //iterate through the errorStackCopy array and add to errorStack array, the error objects that have no matching entry in the errorStack
+ //and that are not related to serviceChecks since serviceCheck errors have already been added when iterating through the missing dependencies list
+ //Only add Ranger, Ambari Metrics, Spark and file system service validation errors if they exist in the errorStackCopy array
+ var ctr = 0;
+ while(ctr < errorStackCopy.length) {
+ //no matching entry in errorStack array
+ if (!this.get('errorStack').someProperty('id', errorStackCopy[ctr].id)) {
+ //not serviceCheck error
+ if(!errorStackCopy[ctr].id.startsWith('serviceCheck_')) {
+ this.get('errorStack').push(this.createError(errorStackCopy[ctr]));
+ }
}
+ ctr++;
}
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/f905a024/ambari-web/test/controllers/wizard/step4_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard/step4_test.js b/ambari-web/test/controllers/wizard/step4_test.js
index 80667c9..7796d61 100644
--- a/ambari-web/test/controllers/wizard/step4_test.js
+++ b/ambari-web/test/controllers/wizard/step4_test.js
@@ -25,7 +25,7 @@ describe('App.WizardStep4Controller', function () {
var services = [
'HDFS', 'GANGLIA', 'OOZIE', 'HIVE', 'HBASE', 'PIG', 'SCOOP', 'ZOOKEEPER',
- 'YARN', 'MAPREDUCE2', 'FALCON', 'TEZ', 'STORM', 'AMBARI_METRICS', 'RANGER', 'SPARK'
+ 'YARN', 'MAPREDUCE2', 'FALCON', 'TEZ', 'STORM', 'AMBARI_METRICS', 'RANGER', 'SPARK', 'SLIDER'
];
var controller = App.WizardStep4Controller.create();
@@ -625,6 +625,44 @@ describe('App.WizardStep4Controller', function () {
})
});
+ describe('#serviceDependencyValidation', function () {
+
+ var cases = [
+ {
+ services: ['HBASE'],
+ dependentServices: ['HDFS', 'ZOOKEEPER'],
+ title: 'HBASE selected and HDFS not selected initially'
+ },
+ {
+ services: ['TEZ', 'HDFS'],
+ dependentServices: ['ZOOKEEPER', 'YARN'],
+ title: 'TEZ selected and ZOOKEEPER not selected initially'
+ }
+ ];
+
+ beforeEach(function() {
+ controller.clear();
+ controller.set('errorStack', []);
+ });
+
+ cases.forEach(function (item) {
+ it(item.title, function () {
+ controller.set('content', generateSelectedServicesContent(item.services));
+ controller.serviceDependencyValidation();
+ expect(controller.get('errorStack').mapProperty('id').contains("serviceCheck_"+item.dependentServices[0])).to.equal(true);
+ expect(controller.get('errorStack').mapProperty('id').contains("serviceCheck_"+item.dependentServices[1])).to.equal(true);
+ controller.findProperty('serviceName', item.dependentServices[0]).set('isSelected', true);
+
+ //simulate situation where user clicks cancel on error for first dependent service and then selects it in which case
+ //serviceDependencyValidation() will be called again
+ controller.serviceDependencyValidation();
+ //error for first dependent service must be removed from errorStack array
+ expect(controller.get('errorStack').mapProperty('id').contains("serviceCheck_"+item.dependentServices[0])).to.equal(false);
+ expect(controller.get('errorStack').mapProperty('id').contains("serviceCheck_"+item.dependentServices[1])).to.equal(true);
+ });
+ });
+ });
+
describe('#ambariMetricsValidation', function () {
var cases = [
[18/24] ambari git commit: AMBARI-14974 Ambari maven builds fail on
trunk (dsen)
Posted by nc...@apache.org.
AMBARI-14974 Ambari maven builds fail on trunk (dsen)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/38a52256
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/38a52256
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/38a52256
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 38a52256e0d421fe52beef4e4860b990000676b5
Parents: 400d495
Author: Dmytro Sen <ds...@apache.org>
Authored: Tue Feb 9 18:03:29 2016 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Tue Feb 9 18:03:29 2016 +0200
----------------------------------------------------------------------
ambari-metrics/ambari-metrics-assembly/pom.xml | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/38a52256/ambari-metrics/ambari-metrics-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml b/ambari-metrics/ambari-metrics-assembly/pom.xml
index 130097d..941c3aa 100644
--- a/ambari-metrics/ambari-metrics-assembly/pom.xml
+++ b/ambari-metrics/ambari-metrics-assembly/pom.xml
@@ -89,7 +89,7 @@
<executions>
<execution>
<id>collector</id>
- <phase>package</phase>
+ <phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
@@ -105,7 +105,7 @@
</execution>
<execution>
<id>monitor</id>
- <phase>package</phase>
+ <phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
@@ -121,7 +121,7 @@
</execution>
<execution>
<id>grafana</id>
- <phase>package</phase>
+ <phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
@@ -137,7 +137,7 @@
</execution>
<execution>
<id>hadoop-sink</id>
- <phase>package</phase>
+ <phase>prepare-package</phase>
<goals>
<goal>single</goal>
</goals>
@@ -175,7 +175,7 @@
<executions>
<execution>
<id>copy-resources</id>
- <phase>prepare-package</phase>
+ <phase>package</phase>
<goals>
<goal>copy-resources</goal>
</goals>
@@ -611,7 +611,7 @@
<executions>
<execution>
<id>copy-resources</id>
- <phase>prepare-package</phase>
+ <phase>package</phase>
<goals>
<goal>copy-resources</goal>
</goals>
[04/24] ambari git commit: AMBARI-14938. Some user-specified
auth-to-local rules fail to render when auto generating auth-to-local rules
(rlevas)
Posted by nc...@apache.org.
AMBARI-14938. Some user-specified auth-to-local rules fail to render when auto generating auth-to-local rules (rlevas)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a396ff02
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a396ff02
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a396ff02
Branch: refs/heads/branch-dev-patch-upgrade
Commit: a396ff0203093ba80460657b4f7a4fecfb2b7ed1
Parents: e139ef5
Author: Robert Levas <rl...@hortonworks.com>
Authored: Mon Feb 8 13:39:30 2016 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Mon Feb 8 13:39:30 2016 -0500
----------------------------------------------------------------------
.../server/controller/AuthToLocalBuilder.java | 287 ++++++++++-------
.../server/controller/KerberosHelperImpl.java | 15 +-
.../controller/AuthToLocalBuilderTest.java | 315 ++++++++++++-------
3 files changed, 381 insertions(+), 236 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a396ff02/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java
index a8fc487..9d6db0a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AuthToLocalBuilder.java
@@ -18,7 +18,10 @@
package org.apache.ambari.server.controller;
+import org.apache.commons.lang.StringUtils;
+
import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
@@ -40,19 +43,18 @@ import java.util.regex.Pattern;
* <p/>
* Unqualified Principal (only user is specified):
* RULE:[1:$1@$0](PRIMARY@REALM)s/.*\/LOCAL_USERNAME/
- * <p>
+ * <p/>
* Additionally, for each realm included in the rule set, generate a default realm rule
* in the format: RULE:[1:$1@$0](.*@REALM)s/@.{@literal *}//
- * <p>
+ * <p/>
* Ordering guarantees for the generated rule string are as follows:
* <ul>
- * <li>Rules with the same expected component count are ordered according to match component count</li>
- * <li>Rules with different expected component count are ordered according to the default string ordering</li>
- * <li>Rules in the form of .*@REALM are ordered after all other rules with the same expected component count</li>
+ * <li>Rules with the same expected component count are ordered according to match component count</li>
+ * <li>Rules with different expected component count are ordered according to the default string ordering</li>
+ * <li>Rules in the form of .*@REALM are ordered after all other rules with the same expected component count</li>
* </ul>
- *
*/
-public class AuthToLocalBuilder {
+public class AuthToLocalBuilder implements Cloneable {
public static final ConcatenationType DEFAULT_CONCATENATION_TYPE = ConcatenationType.NEW_LINES;
/**
@@ -60,43 +62,62 @@ public class AuthToLocalBuilder {
*/
private Set<Rule> setRules = new TreeSet<Rule>();
-
/**
- * A flag indicating whether case insensitive support to the local username has been requested. This will append an //L switch to the generic realm rule
+ * The default realm.
*/
- private boolean caseInsensitiveUser;
+ private final String defaultRealm;
/**
* A set of additional realm names to reference when generating rules.
*/
- private Set<String> additionalRealms = new HashSet<String>();
+ private final Set<String> additionalRealms;
+
/**
- * Default constructor. Case insensitive support false by default
+ * A flag indicating whether case insensitive support to the local username has been requested. This will append an //L switch to the generic realm rule
+ */
+ private boolean caseInsensitiveUser;
+
+ /**
+ * Constructs a new AuthToLocalBuilder.
+ *
+ * @param defaultRealm a String declaring the default realm
+ * @param additionalRealms a String containing a comma-delimited list of realm names
+ * to incorporate into the generated rule set
+ * @param caseInsensitiveUserSupport true indicating that case-insensitivity should be enabled;
+ * false otherwise
*/
- public AuthToLocalBuilder() {
- this(false, null);
+ public AuthToLocalBuilder(String defaultRealm, String additionalRealms, boolean caseInsensitiveUserSupport) {
+ this(defaultRealm, splitDelimitedString(additionalRealms), caseInsensitiveUserSupport);
}
/**
* Constructs a new AuthToLocalBuilder.
*
+ * @param defaultRealm a String declaring the default realm
+ * @param additionalRealms a collection of Strings declaring the set of realm names to
+ * incorporate into the generated rule set
* @param caseInsensitiveUserSupport true indicating that case-insensitivity should be enabled;
* false otherwise
- * @param additionalRealms a String containing a comma-delimited list of realm names to generate
- * default auth-to-local rules for
*/
- public AuthToLocalBuilder(boolean caseInsensitiveUserSupport, String additionalRealms) {
+ public AuthToLocalBuilder(String defaultRealm, Collection<String> additionalRealms, boolean caseInsensitiveUserSupport) {
+ this.defaultRealm = defaultRealm;
+
+ this.additionalRealms = (additionalRealms == null)
+ ? Collections.<String>emptySet()
+ : Collections.unmodifiableSet(new HashSet<String>(additionalRealms));
+
this.caseInsensitiveUser = caseInsensitiveUserSupport;
+ }
- if ((additionalRealms != null) && !additionalRealms.isEmpty()) {
- for (String realm : additionalRealms.split("\\s*(?:\\r?\\n|,)\\s*")) {
- realm = realm.trim();
- if (!realm.isEmpty()) {
- this.additionalRealms.add(realm);
- }
- }
- }
+ @Override
+ public Object clone() throws CloneNotSupportedException {
+ AuthToLocalBuilder copy = (AuthToLocalBuilder) super.clone();
+
+ /* **** Copy mutable members **** */
+ copy.setRules = new TreeSet<Rule>(setRules);
+
+ return copy;
}
/**
@@ -106,15 +127,13 @@ public class AuthToLocalBuilder {
* @param authToLocalRules config property value containing the existing rules
*/
public void addRules(String authToLocalRules) {
- if (authToLocalRules != null && ! authToLocalRules.isEmpty()) {
+ if (!StringUtils.isEmpty(authToLocalRules)) {
String[] rules = authToLocalRules.split("RULE:|DEFAULT");
for (String r : rules) {
r = r.trim();
- if (! r.isEmpty()) {
+ if (!r.isEmpty()) {
Rule rule = createRule(r);
setRules.add(rule);
- // ensure that a default rule is added for each realm
- addDefaultRealmRule(rule.getPrincipal());
}
}
}
@@ -139,9 +158,7 @@ public class AuthToLocalBuilder {
* @throws IllegalArgumentException if the provided principal doesn't contain a realm element
*/
public void addRule(String principal, String localUsername) {
- if ((principal != null) && (localUsername != null) &&
- !principal.isEmpty() && !localUsername.isEmpty()) {
-
+ if (!StringUtils.isEmpty(principal) && !StringUtils.isEmpty(localUsername)) {
Principal p = new Principal(principal);
if (p.getRealm() == null) {
throw new IllegalArgumentException(
@@ -158,13 +175,12 @@ public class AuthToLocalBuilder {
* Generates the auth_to_local rules used by configuration settings such as core-site/auth_to_local.
* <p/>
* Each rule is concatenated using the default ConcatenationType, like calling
- * {@link #generate(String, ConcatenationType)} with {@link #DEFAULT_CONCATENATION_TYPE}
+ * {@link #generate(ConcatenationType)} with {@link #DEFAULT_CONCATENATION_TYPE}
*
- * @param realm a string declaring the realm to use in rule set
* @return a string containing the generated auth-to-local rule set
*/
- public String generate(String realm) {
- return generate(realm, null);
+ public String generate() {
+ return generate(null);
}
/**
@@ -175,14 +191,15 @@ public class AuthToLocalBuilder {
* If the concatenation type is <code>null</code>, the default concatenation type is assumed -
* see {@link #DEFAULT_CONCATENATION_TYPE}.
*
- * @param realm a string declaring the realm to use in rule set
* @param concatenationType the concatenation type to use to generate the rule set string
* @return a string containing the generated auth-to-local rule set
*/
- public String generate(String realm, ConcatenationType concatenationType) {
+ public String generate(ConcatenationType concatenationType) {
StringBuilder builder = new StringBuilder();
// ensure that a default rule is added for this realm
- setRules.add(createDefaultRealmRule(realm));
+ if (!StringUtils.isEmpty(defaultRealm)) {
+ setRules.add(createDefaultRealmRule(defaultRealm));
+ }
// ensure that a default realm rule is added for the specified additional realms
for (String additionalRealm : additionalRealms) {
@@ -233,11 +250,11 @@ public class AuthToLocalBuilder {
* Add a default realm rule for the realm associated with a principal.
* If the realm is null or is a wildcard ".*" then no rule id added.
*
- * @param principal principal which contains the realm
+ * @param principal principal which contains the realm
*/
private void addDefaultRealmRule(Principal principal) {
String realm = principal.getRealm();
- if (realm != null && ! realm.equals(".*")) {
+ if (realm != null && !realm.equals(".*")) {
setRules.add(createDefaultRealmRule(realm));
}
}
@@ -245,9 +262,8 @@ public class AuthToLocalBuilder {
/**
* Create a rule that expects 2 components in the principal and ignores hostname in the comparison.
*
- * @param principal principal
- * @param localUser local user
- *
+ * @param principal principal
+ * @param localUser local user
* @return a new rule that ignores hostname in the comparison
*/
private Rule createHostAgnosticRule(Principal principal, String localUser) {
@@ -262,44 +278,47 @@ public class AuthToLocalBuilder {
/**
* Create a default rule for a realm which matches all principals with 1 component and the same realm.
*
- * @param realm realm that the rule is being created for
- *
- * @return a new default realm rule
+ * @param realm realm that the rule is being created for
+ * @return a new default realm rule
*/
private Rule createDefaultRealmRule(String realm) {
String caseSensitivityRule = caseInsensitiveUser ? "/L" : "";
return new Rule(new Principal(String.format(".*@%s", realm)),
- 1, 1, String.format("RULE:[1:$1@$0](.*@%s)s/@.*//" + caseSensitivityRule, realm));
+ 1, 1, String.format("RULE:[1:$1@$0](.*@%s)s/@.*//" + caseSensitivityRule, realm));
}
/**
* Create a rule from an existing string representation.
- * @param rule string representation of a rule
*
- * @return a new rule which matches the provided string representation
+ * @param rule string representation of a rule
+ * @return a new rule which matches the provided string representation
*/
private Rule createRule(String rule) {
return new Rule(rule.startsWith("RULE:") ? rule : String.format("RULE:%s", rule));
}
/**
- * Creates and returns a deep copy of this AuthToLocalBuilder.
+ * Given a comma or line delimited list of strings, returns a collection of non-empty strings.
*
- * @return a deep copy of this AuthToLocalBuilder
+ * @param string a string to split
+ * @return an array of non-empty strings or null if the source string is empty or null
*/
- public AuthToLocalBuilder copy() {
- AuthToLocalBuilder copy = new AuthToLocalBuilder();
+ private static Collection<String> splitDelimitedString(String string) {
+ Collection<String> collection = null;
- // TODO: This needs to be done in a loop rather than use Set.addAll because there may be an issue
- // TODO: with the Rule.compareTo method?
- for(Rule rule:setRules) {
- copy.setRules.add(rule);
+ if (!StringUtils.isEmpty(string)) {
+ collection = new HashSet<String>();
+
+ for (String realm : string.split("\\s*(?:\\r?\\n|,)\\s*")) {
+ realm = realm.trim();
+ if (!realm.isEmpty()) {
+ collection.add(realm);
+ }
+ }
}
- copy.caseInsensitiveUser = this.caseInsensitiveUser;
- copy.additionalRealms.addAll(this.additionalRealms);
- return copy;
+ return collection;
}
@@ -311,7 +330,7 @@ public class AuthToLocalBuilder {
* pattern used to parse existing rules
*/
private static final Pattern PATTERN_RULE_PARSE =
- Pattern.compile("RULE:\\s*\\[\\s*(\\d)\\s*:\\s*(.+?)(?:@(.+?))??\\s*\\]\\s*\\((.+?)\\)\\s*([^\\\\\\n]*)(.|\\n)*");
+ Pattern.compile("RULE:\\s*\\[\\s*(\\d)\\s*:\\s*(.+?)(?:@(.+?))??\\s*\\]\\s*\\((.+?)\\)\\s*s/(.*?)/(.*?)/([a-zA-Z]*)(?:.|\n)*");
/**
* associated principal
@@ -336,10 +355,10 @@ public class AuthToLocalBuilder {
/**
* Constructor.
*
- * @param principal principal
- * @param expectedComponentCount number of components needed by a principal to match
- * @param matchComponentCount number of components which are included in the rule evaluation
- * @param rule string representation of the rule
+ * @param principal principal
+ * @param expectedComponentCount number of components needed by a principal to match
+ * @param matchComponentCount number of components which are included in the rule evaluation
+ * @param rule string representation of the rule
*/
public Rule(Principal principal, int expectedComponentCount, int matchComponentCount, String rule) {
this.principal = principal;
@@ -351,12 +370,12 @@ public class AuthToLocalBuilder {
/**
* Constructor.
*
- * @param rule string representation of the rule
+ * @param rule string representation of the rule
*/
public Rule(String rule) {
//this.rule = rule;
Matcher m = PATTERN_RULE_PARSE.matcher(rule);
- if (! m.matches()) {
+ if (!m.matches()) {
throw new IllegalArgumentException("Invalid rule: " + rule);
}
expectedComponentCount = Integer.valueOf(m.group(1));
@@ -365,18 +384,20 @@ public class AuthToLocalBuilder {
matchComponentCount = (matchPattern.startsWith("$") ?
matchPattern.substring(1) :
matchPattern).
- split("\\$").length;
+ split("\\$").length;
String patternRealm = m.group(3);
principal = new Principal(m.group(4));
- String replacementRule = m.group(5);
+ String replacementPattern = m.group(5);
+ String replacementReplacement = m.group(6);
+ String replacementModifier = m.group(7);
if (patternRealm != null) {
- this.rule = String.format("RULE:[%d:%s@%s](%s)%s",
+ this.rule = String.format("RULE:[%d:%s@%s](%s)s/%s/%s/%s",
expectedComponentCount, matchPattern, patternRealm,
- principal.toString(), replacementRule);
+ principal.toString(), replacementPattern, replacementReplacement, replacementModifier);
} else {
- this.rule = String.format("RULE:[%d:%s](%s)%s",
+ this.rule = String.format("RULE:[%d:%s](%s)s/%s/%s/%s",
expectedComponentCount, matchPattern,
- principal.toString(), replacementRule);
+ principal.toString(), replacementPattern, replacementReplacement, replacementModifier);
}
}
@@ -422,55 +443,52 @@ public class AuthToLocalBuilder {
/**
* Compares rules.
- * <p>
+ * <p/>
* For rules with different expected component counts, the default string comparison is used.
* For rules with the same expected component count rules are ordered so that rules with a higher
* match component count occur first.
- * <p>
+ * <p/>
* For rules with the same expected component count, default realm rules in the form of
* .*@myRealm.com are ordered last.
*
- * @param other the other rule to compare
- *
+ * @param other the other rule to compare
* @return a negative integer, zero, or a positive integer as this object is less than,
- * equal to, or greater than the specified object
+ * equal to, or greater than the specified object
*/
@Override
public int compareTo(Rule other) {
- Principal thatPrincipal = other.getPrincipal();
- //todo: better implementation that recursively evaluates realm and all components
- if (expectedComponentCount != other.getExpectedComponentCount()) {
- return rule.compareTo(other.rule);
- } else {
- if (matchComponentCount != other.getMatchComponentCount()) {
- return other.getMatchComponentCount() - matchComponentCount;
- } else {
- if (principal.equals(thatPrincipal)) {
- return rule.compareTo(other.rule);
+ int retVal = expectedComponentCount - other.getExpectedComponentCount();
+
+ if (retVal == 0) {
+ retVal = other.getMatchComponentCount() - matchComponentCount;
+
+ if (retVal == 0) {
+ Principal otherPrincipal = other.getPrincipal();
+ if (principal.equals(otherPrincipal)) {
+ retVal = rule.compareTo(other.rule);
} else {
// check for wildcard realms '.*'
String realm = principal.getRealm();
- String thatRealm = thatPrincipal.getRealm();
- if (realm == null ? thatRealm != null : ! realm.equals(thatRealm)) {
- if (realm != null && realm.equals(".*")) {
- return 1;
- } else if (thatRealm != null && thatRealm.equals(".*")) {
- return -1;
+ String otherRealm = otherPrincipal.getRealm();
+ retVal = compareValueWithWildcards(realm, otherRealm);
+
+ if (retVal == 0) {
+ for (int i = 1; i <= matchComponentCount; i++) {
+ // check for wildcard component
+ String component1 = principal.getComponent(1);
+ String otherComponent1 = otherPrincipal.getComponent(1);
+ retVal = compareValueWithWildcards(component1, otherComponent1);
+
+ if (retVal != 0) {
+ break;
+ }
}
}
- // check for wildcard component 1
- String component1 = principal.getComponent(1);
- String thatComponent1 = thatPrincipal.getComponent(1);
- if (component1 != null && component1.equals(".*")) {
- return 1;
- } else if(thatComponent1 != null && thatComponent1.equals(".*")) {
- return -1;
- } else {
- return rule.compareTo(other.rule);
- }
}
}
}
+
+ return retVal;
}
@Override
@@ -482,6 +500,42 @@ public class AuthToLocalBuilder {
public int hashCode() {
return rule.hashCode();
}
+
+ /**
+ * Compares 2 strings for use in compareTo methods but orders <code>null</code>s first and wildcards last.
+ * <p/>
+ * Rules:
+ * <ul>
+ * <li><code>null</code> is ordered before any other string except for <code>null</code>, which is considered be equal</li>
+ * <li><code>.*</code> is ordered after any other string except for <code>.*</code>, which is considered equal</li>
+ * <li>All other values are order based on the result of {@link String#compareTo(String)}</li>
+ * </ul>
+ *
+ * @param s1 the first string to be compared.
+ * @param s2 the second string to be compared.
+ * @return a negative integer, zero, or a positive integer as the first argument is less than,
+ * equal to, or greater than the second.
+ * @see Comparable#compareTo(Object)
+ */
+ private int compareValueWithWildcards(String s1, String s2) {
+ if (s1 == null) {
+ if (s2 == null) {
+ return 0;
+ } else {
+ return -1;
+ }
+ } else if (s2 == null) {
+ return 1;
+ } else if (s1.equals(s2)) {
+ return 0;
+ } else if (s1.equals(".*")) {
+ return 1;
+ } else if (s2.equals(".*")) {
+ return -1;
+ } else {
+ return s1.compareTo(s2);
+ }
+ }
}
/**
@@ -512,7 +566,7 @@ public class AuthToLocalBuilder {
/**
* Constructor.
*
- * @param principal string representation of the principal
+ * @param principal string representation of the principal
*/
public Principal(String principal) {
this.principal = principal;
@@ -547,7 +601,6 @@ public class AuthToLocalBuilder {
* Uses the range 1-n to match the notation used in the rule.
*
* @param position position of the component in the range 1-n
- *
* @return the component at the specified location or null
*/
public String getComponent(int position) {
@@ -574,16 +627,20 @@ public class AuthToLocalBuilder {
@Override
public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
Principal principal1 = (Principal) o;
return components.equals(principal1.components) &&
- principal.equals(principal1.principal) &&
- !(realm != null ?
- !realm.equals(principal1.realm) :
- principal1.realm != null);
+ principal.equals(principal1.principal) &&
+ !(realm != null ?
+ !realm.equals(principal1.realm) :
+ principal1.realm != null);
}
@@ -622,10 +679,10 @@ public class AuthToLocalBuilder {
* @return a ConcatenationType
*/
public static ConcatenationType translate(String value) {
- if(value != null) {
+ if (value != null) {
value = value.trim();
- if(!value.isEmpty()) {
+ if (!value.isEmpty()) {
return valueOf(value.toUpperCase());
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a396ff02/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index 556bed8..fe1ba46 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -664,7 +664,7 @@ public class KerberosHelperImpl implements KerberosHelper {
String additionalRealms = kerberosDescriptor.getProperty("additional_realms");
// Determine which properties need to be set
- AuthToLocalBuilder authToLocalBuilder = new AuthToLocalBuilder(caseInsensitiveUser, additionalRealms);
+ AuthToLocalBuilder authToLocalBuilder = new AuthToLocalBuilder(realm, additionalRealms, caseInsensitiveUser);
addIdentities(authToLocalBuilder, kerberosDescriptor.getIdentities(), null, existingConfigurations);
authToLocalProperties = kerberosDescriptor.getAuthToLocalProperties();
@@ -750,7 +750,14 @@ public class KerberosHelperImpl implements KerberosHelper {
Matcher m = KerberosDescriptor.AUTH_TO_LOCAL_PROPERTY_SPECIFICATION_PATTERN.matcher(authToLocalProperty);
if (m.matches()) {
- AuthToLocalBuilder builder = authToLocalBuilder.copy();
+ AuthToLocalBuilder builder;
+ try {
+ builder = (AuthToLocalBuilder) authToLocalBuilder.clone();
+ } catch (CloneNotSupportedException e) {
+ LOG.error("Failed to clone the AuthToLocalBuilder: " + e.getLocalizedMessage(), e);
+ throw new AmbariException("Failed to clone the AuthToLocalBuilder: " + e.getLocalizedMessage(), e);
+ }
+
String configType = m.group(1);
String propertyName = m.group(2);
@@ -773,8 +780,8 @@ public class KerberosHelperImpl implements KerberosHelper {
kerberosConfigurations.put(configType, kerberosConfiguration);
}
- kerberosConfiguration.put(propertyName, builder.generate(realm,
- AuthToLocalBuilder.ConcatenationType.translate(m.group(3))));
+ kerberosConfiguration.put(propertyName,
+ builder.generate(AuthToLocalBuilder.ConcatenationType.translate(m.group(3))));
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/a396ff02/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
index 122e632..c88acc1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
@@ -22,6 +22,7 @@ import org.apache.ambari.server.utils.CollectionPresentationUtils;
import org.junit.Test;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import static org.junit.Assert.*;
@@ -30,7 +31,7 @@ public class AuthToLocalBuilderTest {
@Test
public void testRuleGeneration() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
// Duplicate principal for secondary namenode, should be filtered out...
@@ -46,22 +47,22 @@ public class AuthToLocalBuilderTest {
assertEquals(
"RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
- "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
- "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
- "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
+ "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
+ "DEFAULT",
+ builder.generate());
}
@Test
public void testRuleGeneration_caseInsensitiveSupport() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder(true, null);
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), true);
builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
// Duplicate principal for secondary namenode, should be filtered out...
@@ -76,30 +77,30 @@ public class AuthToLocalBuilderTest {
builder.addRule("foobar@EXAMPLE.COM", "hdfs");
assertEquals(
- "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*///L\n" +
- "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
- "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
- "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
- "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*///L\n" +
+ "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
+ "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
+ "DEFAULT",
+ builder.generate());
}
@Test
public void testRuleGeneration_ExistingRules() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
// previously generated non-host specific rules
builder.addRule("foobar@EXAMPLE.COM", "hdfs");
// doesn't exist in latter generation
builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
- String existingRules = builder.generate("EXAMPLE.COM");
+ String existingRules = builder.generate();
- builder = new AuthToLocalBuilder();
+ builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
// set previously existing rules
builder.addRules(existingRules);
@@ -115,32 +116,32 @@ public class AuthToLocalBuilderTest {
assertEquals(
"RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
- "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
- "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
- "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
+ "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
+ "DEFAULT",
+ builder.generate());
}
@Test
public void testRuleGeneration_ExistingRules_existingMoreSpecificRule() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
// previously generated non-host specific rules
builder.addRule("foobar@EXAMPLE.COM", "hdfs");
builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
- String existingRules = builder.generate("EXAMPLE.COM");
+ String existingRules = builder.generate();
// prepend host specific rule
existingRules = "RULE:[2:$1/$2@$0](dn/somehost.com@EXAMPLE.COM)s/.*/hdfs/\n" + existingRules;
// append default realm rule for additional realm
existingRules += "\nRULE:[1:$1@$0](.*@OTHER_REALM.COM)s/@.*//";
- builder = new AuthToLocalBuilder();
+ builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
// set previously existing rules
builder.addRules(existingRules);
// more specific host qualifed rule exists for dn
@@ -158,29 +159,29 @@ public class AuthToLocalBuilderTest {
assertEquals(
"RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "RULE:[1:$1@$0](.*@OTHER_REALM.COM)s/@.*//\n" +
- "RULE:[2:$1/$2@$0](dn/somehost.com@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
- "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
- "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
- "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "RULE:[1:$1@$0](.*@OTHER_REALM.COM)s/@.*//\n" +
+ "RULE:[2:$1/$2@$0](dn/somehost.com@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
+ "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
+ "DEFAULT",
+ builder.generate());
}
@Test
public void testAddNullExistingRule() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRules(null);
assertEquals(
"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM")
+ "DEFAULT",
+ builder.generate()
);
}
@@ -194,7 +195,7 @@ public class AuthToLocalBuilderTest {
"RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\\\\\\" +
"RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\\/\\";
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRules(rules);
assertEquals(
@@ -205,80 +206,116 @@ public class AuthToLocalBuilderTest {
"RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
"RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
"DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ builder.generate());
}
@Test
+ public void testRuleRegexWithComplexReplacements() {
+ String rules =
+ "RULE:[1:$1@$0](foobar@\\QEXAMPLE1.COM\\E$)s/.*@\\QEXAMPLE1.COM\\E$/hdfs/\n" +
+ "RULE:[1:$1@$0](.*@\\QEXAMPLE1.COM\\E)s/@\\QEXAMPLE1.COM\\E//\n" +
+ "RULE:[2:$1@$0](.*@\\QEXAMPLE1.COM\\E)s/@\\QEXAMPLE1.COM\\E//";
+
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
+ builder.addRules(rules);
+
+ builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
+ builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
+ builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
+ builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
+ builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
+ builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
+ builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");
+ builder.addRule("ambari-qa-c1@EXAMPLE.COM", "ambari-qa");
+
+ assertEquals(
+ "RULE:[1:$1@$0](ambari-qa-c1@EXAMPLE.COM)s/.*/ambari-qa/\n" +
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "RULE:[1:$1@$0](.*@\\QEXAMPLE1.COM\\E)s/@\\QEXAMPLE1.COM\\E//\n" +
+ "RULE:[1:$1@$0](foobar@\\QEXAMPLE1.COM\\E$)s/.*@\\QEXAMPLE1.COM\\E$/hdfs/\n" +
+ "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
+ "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0](.*@\\QEXAMPLE1.COM\\E)s/@\\QEXAMPLE1.COM\\E//\n" +
+ "DEFAULT",
+ builder.generate());
+ }
+
+ @Test
public void testRulesWithWhitespace() {
String rulesWithWhitespace =
"RULE: [1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[ 1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "RULE:[2: $1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0 ](hm@EXAMPLE.COM)s/.*/hbase/\n" +
- "RULE:[2:$1@$0] (jhs@EXAMPLE.COM)s/.*/mapred/\n" +
- "RULE:[2:$1@$0](jn@EXAMPLE.COM) s/.*/hdfs/\n";
+ "RULE:[ 1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "RULE:[2: $1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0 ](hm@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0] (jhs@EXAMPLE.COM)s/.*/mapred/\n" +
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM) s/.*/hdfs/\n";
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRules(rulesWithWhitespace);
assertEquals(
"RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
- "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
- "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "DEFAULT",
+ builder.generate());
}
@Test
public void testExistingRuleWithNoRealm() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRules("RULE:[1:$1](foobar)s/.*/hdfs/");
assertEquals(
"RULE:[1:$1](foobar)s/.*/hdfs/\n" +
- "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "DEFAULT",
+ builder.generate());
}
@Test
public void testExistingRuleWithNoRealm2() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRules("RULE:[1:$1/$2](foobar/someHost)s/.*/hdfs/");
assertEquals(
"RULE:[1:$1/$2](foobar/someHost)s/.*/hdfs/\n" +
- "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "DEFAULT",
+ builder.generate());
}
- @Test(expected=IllegalArgumentException.class)
+ @Test(expected = IllegalArgumentException.class)
public void testAddNewRuleWithNoRealm() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRule("someUser", "hdfs");
}
- @Test(expected=IllegalArgumentException.class)
+ @Test(expected = IllegalArgumentException.class)
public void testAddNewRuleWithNoRealm2() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRule("someUser/someHost", "hdfs");
}
@Test
public void testExistingWildcardRealm() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRules("RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n" +
- "RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](.*@EXAMPLE.COM)s/.*/yarn/\n" +
- "DEFAULT");
+ "RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](.*@EXAMPLE.COM)s/.*/yarn/\n" +
+ "DEFAULT");
builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
@@ -287,18 +324,18 @@ public class AuthToLocalBuilderTest {
// other rules with the same number of expected principal components
assertEquals(
"RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
- "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0](.*@EXAMPLE.COM)s/.*/yarn/\n" +
- "RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n" +
- "RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n" +
- "DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](.*@EXAMPLE.COM)s/.*/yarn/\n" +
+ "RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n" +
+ "DEFAULT",
+ builder.generate());
}
@Test
- public void testCopy() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder();
+ public void testClone() throws CloneNotSupportedException {
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
@@ -310,16 +347,18 @@ public class AuthToLocalBuilderTest {
builder.addRule("foobar@EXAMPLE.COM", "hdfs");
- AuthToLocalBuilder copy = builder.copy();
-
+ AuthToLocalBuilder copy = (AuthToLocalBuilder) builder.clone();
assertNotSame(builder, copy);
- assertEquals(copy.generate("EXAMPLE.COM"), builder.generate("EXAMPLE.COM"));
+ assertEquals(builder.generate(), copy.generate());
+ // Ensure that mutable fields do not change the copy when changed in the original
+ builder.addRule("user@EXAMPLE.COM", "hdfs");
+ assertTrue(!copy.generate().equals(builder.generate()));
}
@Test
public void testAdditionalRealms() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder(false, "REALM2,REALM3, REALM1 ");
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", "REALM2,REALM3, REALM1 ", false);
builder.addRules(
"RULE:[1:$1@$0](.*@FOOBAR.COM)s/@.*//\n" +
@@ -334,26 +373,26 @@ public class AuthToLocalBuilderTest {
builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");
// Depends on hashing, string representation can be different
- List<String> rules = Arrays.asList(new String[]{"RULE:[1:$1@$0](.*@FOOBAR.COM)s/@.*//",
- "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//",
- "RULE:[1:$1@$0](.*@REALM2)s/@.*//",
- "RULE:[1:$1@$0](.*@REALM1)s/@.*//",
- "RULE:[1:$1@$0](.*@REALM3)s/@.*//",
- "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/",
- "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/",
- "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/",
- "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/",
- "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/",
- "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/",
- "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/",
- "DEFAULT"});
- assertTrue(CollectionPresentationUtils.isStringPermutationOfCollection(builder.generate("EXAMPLE.COM"), rules,
- "\n", 0, 0));
+ List<String> rules = Arrays.asList("RULE:[1:$1@$0](.*@FOOBAR.COM)s/@.*//",
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//",
+ "RULE:[1:$1@$0](.*@REALM2)s/@.*//",
+ "RULE:[1:$1@$0](.*@REALM1)s/@.*//",
+ "RULE:[1:$1@$0](.*@REALM3)s/@.*//",
+ "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/",
+ "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/",
+ "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/",
+ "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/",
+ "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/",
+ "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/",
+ "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/",
+ "DEFAULT");
+ assertTrue(CollectionPresentationUtils.isStringPermutationOfCollection(builder.generate(), rules,
+ "\n", 0, 0));
}
@Test
public void testAdditionalRealms_Null() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder(false, null);
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
@@ -373,12 +412,12 @@ public class AuthToLocalBuilderTest {
"RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
"RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
"DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ builder.generate());
}
@Test
public void testAdditionalRealms_Empty() {
- AuthToLocalBuilder builder = new AuthToLocalBuilder(false, "");
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", "", false);
builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
@@ -398,6 +437,48 @@ public class AuthToLocalBuilderTest {
"RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
"RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
"DEFAULT",
- builder.generate("EXAMPLE.COM"));
+ builder.generate());
+ }
+
+ @Test
+ public void testUseCase() {
+ AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", "FOOBAR.COM,HW.HDP,BAZ.NET", false);
+
+ String existingRules =
+ "RULE:[1:$1@$0](.*@BAZ.NET)s/@.*//\n" +
+ "RULE:[1:$1@$0](accumulo-c1@EXAMPLE.COM)s/.*/accumulo/\n" +
+ "RULE:[1:$1@$0](ambari-qa-c1@EXAMPLE.COM)s/.*/ambari-qa/\n" +
+ "RULE:[1:$1@$0](hbase-c1@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[1:$1@$0](hdfs-c1@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[1:$1@$0](spark-c1@EXAMPLE.COM)s/.*/spark/\n" +
+ "RULE:[1:$1@$0](tracer-c1@EXAMPLE.COM)s/.*/accumulo/\n" +
+ "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
+ "RULE:[1:$1@$0](.*@FOOBAR.COM)s/@.*//\n" +
+ "RULE:[1:$1@$0](.*@HW.HDP)s/@.*//\n" +
+ "RULE:[2:$1@$0](accumulo@EXAMPLE.COM)s/.*/accumulo/\n" +
+ "RULE:[2:$1@$0](amshbase@EXAMPLE.COM)s/.*/ams/\n" +
+ "RULE:[2:$1@$0](amszk@EXAMPLE.COM)s/.*/ams/\n" +
+ "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](falcon@EXAMPLE.COM)s/.*/falcon/\n" +
+ "RULE:[2:$1@$0](hbase@EXAMPLE.COM)s/.*/hbase/\n" +
+ "RULE:[2:$1@$0](hive@EXAMPLE.COM)s/.*/hive/\n" +
+ "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
+ "RULE:[2:$1@$0](nm@EXAMPLE.COM)s/.*/yarn/\n" +
+ "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
+ "RULE:[2:$1@$0](oozie@EXAMPLE.COM)s/.*/oozie/\n" +
+ "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
+ "RULE:[2:$1@$0](yarn@EXAMPLE.COM)s/.*/yarn/\n" +
+ "DEFAULT";
+
+ builder.addRules(existingRules);
+
+ builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
+ builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
+ builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
+ builder.addRule("yarn/_HOST@EXAMPLE.COM", "yarn");
+ builder.addRule("kafka/_HOST@EXAMPLE.COM", null);
+ builder.addRule("hdfs-c1@EXAMPLE.COM", "hdfs");
+
+ assertEquals(existingRules, builder.generate());
}
}
\ No newline at end of file
[14/24] ambari git commit: AMBARI-14970. Remove support for HDP 2.1
(aonishuk)
Posted by nc...@apache.org.
AMBARI-14970. Remove support for HDP 2.1 (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dfff43d7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dfff43d7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dfff43d7
Branch: refs/heads/branch-dev-patch-upgrade
Commit: dfff43d747b0b8c65756e4c0d7623fe7f45c6c4b
Parents: cb3b3ed
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Tue Feb 9 13:47:22 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Tue Feb 9 13:47:22 2016 +0200
----------------------------------------------------------------------
ambari-server/src/main/resources/stacks/HDP/2.1/metainfo.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/dfff43d7/ambari-server/src/main/resources/stacks/HDP/2.1/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.1/metainfo.xml
index 8ee982c..4af3622 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/metainfo.xml
@@ -17,7 +17,7 @@
-->
<metainfo>
<versions>
- <active>true</active>
+ <active>false</active>
</versions>
<extends>2.0.6</extends>
<minJdk>1.7</minJdk>