You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2017/01/30 15:40:22 UTC
[01/30] ambari git commit: AMBARI-19737. Add a validation check in
HSI about Capacity Scheduler preemption requirements.
Repository: ambari
Updated Branches:
refs/heads/branch-dev-patch-upgrade 535327d12 -> 393ab3ee2
AMBARI-19737. Add a validation check in HSI about Capacity Scheduler preemption requirements.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dcdf95b2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dcdf95b2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dcdf95b2
Branch: refs/heads/branch-dev-patch-upgrade
Commit: dcdf95b28a98e47062727f270adf2b4c69816f93
Parents: 9963ac8
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Fri Jan 27 11:02:48 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Fri Jan 27 11:02:48 2017 -0800
----------------------------------------------------------------------
.../stacks/HDP/2.5/services/stack_advisor.py | 12 ++++++++++++
.../2.5/common/services-normal-his-2-hosts.json | 8 +++++++-
.../2.5/common/services-normal-his-valid.json | 8 +++++++-
.../stacks/2.5/common/test_stack_advisor.py | 18 +++++++++++-------
4 files changed, 37 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcdf95b2/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
index 8e377da..ca4a968 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -388,8 +388,11 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
def validateHiveInteractiveEnvConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
hive_site_env_properties = self.getSiteProperties(configurations, "hive-interactive-env")
+ yarn_site_properties = self.getSiteProperties(configurations, "yarn-site")
validationItems = []
hsi_hosts = self.getHostsForComponent(services, "HIVE", "HIVE_SERVER_INTERACTIVE")
+
# Check that 'enable_hive_interactive' is ON, given that HSI is present on at least one host.
if len(hsi_hosts) > 0:
# HIVE_SERVER_INTERACTIVE is mapped to a host
if 'enable_hive_interactive' not in hive_site_env_properties or (
@@ -407,6 +410,15 @@ class HDP25StackAdvisor(HDP24StackAdvisor):
"item": self.getErrorItem(
"enable_hive_interactive in hive-interactive-env should be set to false.")})
+ # Check for 'yarn.resourcemanager.scheduler.monitor.enable' config to be true if HSI is ON.
+ if yarn_site_properties and 'yarn.resourcemanager.scheduler.monitor.enable' in yarn_site_properties:
+ scheduler_monitor_enabled = yarn_site_properties['yarn.resourcemanager.scheduler.monitor.enable']
+ if scheduler_monitor_enabled.lower() == 'false' and hive_site_env_properties and 'enable_hive_interactive' in hive_site_env_properties and \
+ hive_site_env_properties['enable_hive_interactive'].lower() == 'true':
+ validationItems.append({"config-name": "enable_hive_interactive",
+ "item": self.getWarnItem(
+ "When enabling LLAP, set 'yarn.resourcemanager.scheduler.monitor.enable' to true to ensure that LLAP gets the full allocated capacity.")})
+
validationProblems = self.toConfigurationValidationProblems(validationItems, "hive-interactive-env")
return validationProblems
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcdf95b2/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-2-hosts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-2-hosts.json b/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-2-hosts.json
index 44698ed..0044bf1 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-2-hosts.json
+++ b/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-2-hosts.json
@@ -1108,7 +1108,8 @@
"hive.llap.daemon.queue.name": "llap",
"hive.server2.enable.doAs": "true",
"hive.llap.daemon.num.executors": "0",
- "hive.server2.tez.sessions.per.default.queue": "32"
+ "hive.server2.tez.sessions.per.default.queue": "32",
+ "hive.tez.container.size" : "341"
}
},
"hive-interactive-env" : {
@@ -1159,6 +1160,11 @@
"properties": {
"tez.am.resource.memory.mb" : "341"
}
+ },
+ "tez-interactive-site": {
+ "properties": {
+ "tez.am.resource.memory.mb" : "341"
+ }
}
},
"changed-configurations" : [ ],
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcdf95b2/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-valid.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-valid.json b/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-valid.json
index 7955333..fad4a55 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-valid.json
+++ b/ambari-server/src/test/python/stacks/2.5/common/services-normal-his-valid.json
@@ -1049,7 +1049,8 @@
"configurations" : {
"hive-interactive-site" : {
"properties": {
- "hive.llap.daemon.queue.name": "llap"
+ "hive.llap.daemon.queue.name": "llap",
+ "hive.tez.container.size" : "341"
}
},
"capacity-scheduler" : {
@@ -1095,6 +1096,11 @@
"properties": {
"tez.am.resource.memory.mb" : "341"
}
+ },
+ "tez-interactive-site": {
+ "properties": {
+ "tez.am.resource.memory.mb" : "341"
+ }
}
},
"changed-configurations" : [ ],
http://git-wip-us.apache.org/repos/asf/ambari/blob/dcdf95b2/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
index a53cb25..ac00bab 100644
--- a/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.5/common/test_stack_advisor.py
@@ -492,7 +492,8 @@ class TestHDP25StackAdvisor(TestCase):
"properties": {"hive_security_authorization": "None"}
},
"yarn-site": {
- "properties": {"yarn.resourcemanager.work-preserving-recovery.enabled": "true"}
+ "properties": {"yarn.resourcemanager.work-preserving-recovery.enabled": "true",
+ "yarn.resourcemanager.scheduler.monitor.enable": "false"}
}
}
configurations2 = {
@@ -506,17 +507,22 @@ class TestHDP25StackAdvisor(TestCase):
"properties": {"hive_security_authorization": "None"}
},
"yarn-site": {
- "properties": {"yarn.resourcemanager.work-preserving-recovery.enabled": "true"}
+ "properties": {"yarn.resourcemanager.work-preserving-recovery.enabled": "true",
+ "yarn.resourcemanager.scheduler.monitor.enable": "true"}
}
}
services = self.load_json("services-normal-his-valid.json")
+ # Checks for WARN message that 'yarn.resourcemanager.scheduler.monitor.enable' should be true.
res_expected = [
+ {'config-type': 'hive-interactive-env', 'message': "When enabling LLAP, set 'yarn.resourcemanager.scheduler.monitor.enable' to true to ensure that LLAP gets the full allocated capacity.", 'type': 'configuration', 'config-name': 'enable_hive_interactive', 'level': 'WARN'}
]
# the above error is not what we are checking for - just to keep test happy without having to test
res = self.stackAdvisor.validateHiveInteractiveEnvConfigurations(properties, recommendedDefaults, configurations, services, {})
self.assertEquals(res, res_expected)
+ # (1). Checks for ERROR message for 'enable_hive_interactive' to be true.
+ # (2). Further, no message regarding 'yarn.resourcemanager.scheduler.monitor.enable' as it is true already.
res_expected = [
{'config-type': 'hive-interactive-env', 'message': 'HIVE_SERVER_INTERACTIVE requires enable_hive_interactive in hive-interactive-env set to true.', 'type': 'configuration', 'config-name': 'enable_hive_interactive', 'level': 'ERROR'}
]
@@ -525,7 +531,6 @@ class TestHDP25StackAdvisor(TestCase):
pass
- ''' TODO: Commenting, Need to fix validations in 2.5/stack_advisor and then fix the test code
"""
Tests validations for Hive Server Interactive site.
"""
@@ -558,7 +563,7 @@ class TestHDP25StackAdvisor(TestCase):
# Expected : Error telling about the current size compared to minimum required size.
services1 = self.load_json("services-normal-his-valid.json")
res_expected1 = [
- {'config-type': 'hive-interactive-site', 'message': "Selected queue 'llap' capacity (49%) is less than minimum required "
+ {'config-type': 'hive-interactive-site', 'message': "Selected queue 'llap' capacity (49.0%) is less than minimum required "
"capacity (50%) for LLAP app to run", 'type': 'configuration', 'config-name': 'hive.llap.daemon.queue.name', 'level': 'ERROR'},
]
res1 = self.stackAdvisor.validateHiveInteractiveSiteConfigurations({}, {}, {}, services1, hosts)
@@ -575,7 +580,7 @@ class TestHDP25StackAdvisor(TestCase):
# than 50% of queue capacity for LLAP.
services2 = self.load_json("services-normal-his-2-hosts.json")
res_expected2 = [
- {'config-type': 'hive-interactive-site', 'message': "Selected queue 'llap' capacity (49%) is less than minimum required "
+ {'config-type': 'hive-interactive-site', 'message': "Selected queue 'llap' capacity (49.0%) is less than minimum required "
"capacity (50%) for LLAP app to run", 'type': 'configuration', 'config-name': 'hive.llap.daemon.queue.name', 'level': 'ERROR'},
{'config-type': 'hive-interactive-site', 'message': "Selected queue 'llap' current state is : 'STOPPED'. It is required to be in "
"'RUNNING' state for LLAP to run", 'type': 'configuration', 'config-name': 'hive.llap.daemon.queue.name', 'level': 'ERROR'},
@@ -636,7 +641,7 @@ class TestHDP25StackAdvisor(TestCase):
},
}
res_expected4 = [
- {'config-type': 'hive-interactive-site', 'message': "Selected queue 'llap' capacity (49%) is less than minimum required capacity (200%) for LLAP app to run",
+ {'config-type': 'hive-interactive-site', 'message': "Selected queue 'llap' capacity (49.0%) is less than minimum required capacity (200%) for LLAP app to run",
'type': 'configuration', 'config-name': 'hive.llap.daemon.queue.name', 'level': 'ERROR'},
{'config-type': 'hive-interactive-site', 'message': "Capacity used by 'llap' queue is '250.88'. Service checks may not run as remaining available capacity "
"(261.12) in cluster is less than 512 MB.", 'type': 'configuration', 'config-name': 'hive.llap.daemon.queue.name', 'level': 'WARN'}]
@@ -645,7 +650,6 @@ class TestHDP25StackAdvisor(TestCase):
self.assertEquals(res4, res_expected4)
pass
- '''
[15/30] ambari git commit: AMBARI-19767. Inconsistent auth-to-local
rules processing during Kerberos authentication (rlevas)
Posted by nc...@apache.org.
AMBARI-19767. Inconsistent auth-to-local rules processing during Kerberos authentication (rlevas)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/716b2fca
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/716b2fca
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/716b2fca
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 716b2fca38a9db43b3211b9380f18149a3342256
Parents: e0765d9
Author: Robert Levas <rl...@hortonworks.com>
Authored: Sun Jan 29 11:14:59 2017 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Sun Jan 29 11:15:24 2017 -0500
----------------------------------------------------------------------
.../AmbariAuthToLocalUserDetailsService.java | 17 ++++++++++++-----
1 file changed, 12 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/716b2fca/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
index c85503c..1e4f6ea 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authentication/kerberos/AmbariAuthToLocalUserDetailsService.java
@@ -49,6 +49,8 @@ public class AmbariAuthToLocalUserDetailsService implements UserDetailsService {
private final List<UserType> userTypeOrder;
+ private final String authToLocalRules;
+
/**
* Constructor.
* <p>
@@ -80,18 +82,23 @@ public class AmbariAuthToLocalUserDetailsService implements UserDetailsService {
orderedUserTypes = Collections.singletonList(UserType.LDAP);
}
- KerberosName.setRules(authToLocalRules);
-
this.users = users;
this.userTypeOrder = orderedUserTypes;
+ this.authToLocalRules = authToLocalRules;
}
@Override
public UserDetails loadUserByUsername(String principal) throws UsernameNotFoundException {
- KerberosName kerberosName = new KerberosName(principal);
-
try {
- String username = kerberosName.getShortName();
+ String username;
+
+ // Since KerberosName relies on a static variable to hold on to the auth-to-local rules, attempt
+ // to protect access to the rule set by blocking other threads from changing the rules out from
+ // under us during this operation. Similar logic is used in org.apache.ambari.server.view.ViewContextImpl.getUsername().
+ synchronized (KerberosName.class) {
+ KerberosName.setRules(authToLocalRules);
+ username = new KerberosName(principal).getShortName();
+ }
if (username == null) {
String message = String.format("Failed to translate %s to a local username during Kerberos authentication.", principal);
[28/30] ambari git commit: AMBARI-19775. ActionQueue thread may exit
due to IOError in statusCommandResultQueue.empty(). (Attila Doroszlai via
stoader)
Posted by nc...@apache.org.
AMBARI-19775. ActionQueue thread may exit due to IOError in statusCommandResultQueue.empty(). (Attila Doroszlai via stoader)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b722ffa6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b722ffa6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b722ffa6
Branch: refs/heads/branch-dev-patch-upgrade
Commit: b722ffa6f612685096c0c528264fdf1ae86b5d64
Parents: af6ba59
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Mon Jan 30 14:58:47 2017 +0100
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Mon Jan 30 14:58:47 2017 +0100
----------------------------------------------------------------------
.../src/main/python/ambari_agent/ActionQueue.py | 28 +++++++++++---------
1 file changed, 16 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/b722ffa6/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
index 18d7c2a..8514a88 100644
--- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
+++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
@@ -212,18 +212,22 @@ class ActionQueue(threading.Thread):
pass
def processStatusCommandResultQueueSafeEmpty(self):
- while not self.statusCommandResultQueue.empty():
- try:
- result = self.statusCommandResultQueue.get(False)
- self.process_status_command_result(result)
- except Queue.Empty:
- pass
- except IOError:
- # on race condition in multiprocessing.Queue if get/put and thread kill are executed at the same time.
- # During queue.close IOError will be thrown (this prevents from permanently dead-locked get).
- pass
- except UnicodeDecodeError:
- pass
+ try:
+ while not self.statusCommandResultQueue.empty():
+ try:
+ result = self.statusCommandResultQueue.get(False)
+ self.process_status_command_result(result)
+ except Queue.Empty:
+ pass
+ except IOError:
+ # on race condition in multiprocessing.Queue if get/put and thread kill are executed at the same time.
+ # During queue.close IOError will be thrown (this prevents from permanently dead-locked get).
+ pass
+ except UnicodeDecodeError:
+ pass
+ except IOError:
+ # queue.empty() may also throw IOError
+ pass
def createCommandHandle(self, command):
if command.has_key('__handle'):
[06/30] ambari git commit: AMBARI-19754. Remove white border from the
content area in popup wizards.(xiwang)
Posted by nc...@apache.org.
AMBARI-19754. Remove white border from the content area in popup wizards.(xiwang)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b3391f0d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b3391f0d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b3391f0d
Branch: refs/heads/branch-dev-patch-upgrade
Commit: b3391f0dd030130add23b9ffb09fb7a776dfe969
Parents: 9d8a54c
Author: Xi Wang <xi...@apache.org>
Authored: Fri Jan 27 11:16:49 2017 -0800
Committer: Xi Wang <xi...@apache.org>
Committed: Fri Jan 27 14:22:02 2017 -0800
----------------------------------------------------------------------
ambari-web/app/styles/modal_popups.less | 42 +++++++++++++++++++-
.../views/common/modal_popups/log_tail_popup.js | 2 +-
.../admin/stack_upgrade/upgrade_history_view.js | 2 +-
3 files changed, 42 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/b3391f0d/ambari-web/app/styles/modal_popups.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/modal_popups.less b/ambari-web/app/styles/modal_popups.less
index c10ef30..a6a0196 100644
--- a/ambari-web/app/styles/modal_popups.less
+++ b/ambari-web/app/styles/modal_popups.less
@@ -81,10 +81,14 @@
margin: 0;
}
-/*90% width modal window start*/
+/*90% width wizard modal window start*/
.wizard-modal-wrapper {
.modal {
.modal-body {
+ padding: 0;
+ .wizard {
+ border: none;
+ }
div.error {
color: #b94a48;
.help-block {
@@ -114,8 +118,42 @@
}
}
+/*90% width wizard modal window end*/
+
+/*90% width modal (non-wizard) window start*/
+
+.wide-modal-wrapper {
+ .modal {
+ .modal-body {
+ div.error {
+ color: #b94a48;
+ .help-block {
+ color: #b94a48;
+ }
+ }
+
+ div.error input{
+ border-color: #b94a48;
+ -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
+ -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
+ box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
+ }
+ }
+ }
+
+ .content {
+ padding: 0;
+ }
-/*90% width modal window end*/
+ //fix striping in inner table
+ .table-striped tbody tr:nth-child(odd)
+ td .table-striped tbody
+ tr:nth-child(odd) td,
+ tr:nth-child(even) th {
+ background-color: transparent;
+ }
+}
+/*90% width modal window (non-wizard) end*/
/*700px width modal window start*/
.modal-690px-width {
http://git-wip-us.apache.org/repos/asf/ambari/blob/b3391f0d/ambari-web/app/views/common/modal_popups/log_tail_popup.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/modal_popups/log_tail_popup.js b/ambari-web/app/views/common/modal_popups/log_tail_popup.js
index 1e6924f..6510ac9 100644
--- a/ambari-web/app/views/common/modal_popups/log_tail_popup.js
+++ b/ambari-web/app/views/common/modal_popups/log_tail_popup.js
@@ -22,7 +22,7 @@ var fileUtils = require('utils/file_utils');
App.showLogTailPopup = function(content) {
return App.ModalPopup.show({
- classNames: ['log-tail-popup', 'wizard-modal-wrapper', 'full-height-modal'],
+ classNames: ['log-tail-popup', 'wide-modal-wrapper', 'full-height-modal'],
modalDialogClasses: ['modal-xlg'],
header: fileUtils.fileNameFromPath(content.get('filePath')),
primary: false,
http://git-wip-us.apache.org/repos/asf/ambari/blob/b3391f0d/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
index c4a428e..aeee96e 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_history_view.js
@@ -297,7 +297,7 @@ App.MainAdminStackUpgradeHistoryView = App.TableView.extend(App.TableServerViewM
this.get('controller').set('currentUpgradeRecord', record);
App.ModalPopup.show({
- classNames: ['wizard-modal-wrapper'],
+ classNames: ['wide-modal-wrapper'],
modalDialogClasses: ['modal-xlg'],
header: Em.I18n.t('admin.stackVersions.upgradeHistory.record.title').format(displayName, direction, toVersion),
bodyClass: App.MainAdminStackUpgradeHistoryDetailsView,
[09/30] ambari git commit: AMBARI-19743. Agent registration fails
with non-root agent user (Attila Doroszlai via smohanty)
Posted by nc...@apache.org.
AMBARI-19743. Agent registration fails with non-root agent user (Attila Doroszlai via smohanty)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0e8c966d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0e8c966d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0e8c966d
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 0e8c966d20b895442ea461ce488dcb9aae4d8638
Parents: 771c9c0
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Fri Jan 27 18:17:48 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Fri Jan 27 18:19:36 2017 -0800
----------------------------------------------------------------------
ambari-server/src/main/python/bootstrap.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0e8c966d/ambari-server/src/main/python/bootstrap.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/bootstrap.py b/ambari-server/src/main/python/bootstrap.py
index d836040..f1c53ce 100755
--- a/ambari-server/src/main/python/bootstrap.py
+++ b/ambari-server/src/main/python/bootstrap.py
@@ -630,8 +630,7 @@ class BootstrapDefault(Bootstrap):
self.host_log.write("==========================\n")
self.host_log.write("Running create-python-wrap script...")
- command = "chmod a+x %s && %s" % \
- (REMOTE_CREATE_PYTHON_WRAP_SCRIPT, REMOTE_CREATE_PYTHON_WRAP_SCRIPT)
+ command = "chmod a+x {script} && {sudo} {script}".format(sudo=AMBARI_SUDO, script=REMOTE_CREATE_PYTHON_WRAP_SCRIPT)
ssh = SSH(params.user, params.sshPort, params.sshkey_file, self.host, command,
params.bootdir, self.host_log)
[29/30] ambari git commit: AMBARI-19736. Format ZKFC commands failing
while enabling NameNode HA (stoader)
Posted by nc...@apache.org.
AMBARI-19736. Format ZKFC commands failing while enabling NameNode HA (stoader)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d6d24798
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d6d24798
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d6d24798
Branch: refs/heads/branch-dev-patch-upgrade
Commit: d6d247983c5c66c11260a6d10e7b0e2746fa8146
Parents: b722ffa
Author: Toader, Sebastian <st...@hortonworks.com>
Authored: Mon Jan 30 15:54:20 2017 +0100
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Mon Jan 30 15:54:20 2017 +0100
----------------------------------------------------------------------
.../2.1.0.2.0/package/scripts/hdfs_namenode.py | 4 ++
.../HDFS/2.1.0.2.0/package/scripts/utils.py | 14 ++++---
.../2.1.0.2.0/package/scripts/zkfc_slave.py | 5 ---
.../3.0.0.3.0/package/scripts/hdfs_namenode.py | 4 ++
.../HDFS/3.0.0.3.0/package/scripts/utils.py | 42 +++++++++++---------
.../3.0.0.3.0/package/scripts/zkfc_slave.py | 7 ----
.../2.0.6/hooks/before-ANY/scripts/params.py | 16 ++++++--
.../HDP/3.0/hooks/before-ANY/scripts/params.py | 19 +++++++--
8 files changed, 68 insertions(+), 43 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/d6d24798/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index a2edf38..0489792 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -34,6 +34,7 @@ from resource_management.libraries.functions import Direction
from ambari_commons import OSCheck, OSConst
from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
from utils import get_dfsadmin_base_command
+from utils import set_up_zkfc_security
if OSCheck.is_windows_family():
from resource_management.libraries.functions.windows_service_utils import check_windows_service_status
@@ -95,6 +96,9 @@ def namenode(action=None, hdfs_binary=None, do_format=True, upgrade_type=None,
#we need this directory to be present before any action(HA manual steps for
#additional namenode)
create_name_dirs(params.dfs_name_dir)
+
+ # set up failover / secure zookeeper ACLs; this feature is supported from HDP 2.6 onwards
+ set_up_zkfc_security(params)
elif action == "start":
Logger.info("Called service {0} with upgrade_type: {1}".format(action, str(upgrade_type)))
setup_ranger_hdfs(upgrade_type=upgrade_type)
http://git-wip-us.apache.org/repos/asf/ambari/blob/d6d24798/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
index 03aba7b..d8d0515 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
@@ -387,20 +387,24 @@ def get_dfsadmin_base_command(hdfs_binary, use_specific_namenode = False):
def set_up_zkfc_security(params):
""" Sets up security for accessing zookeper on secure clusters """
- # check if the namenode is HA (this may be redundant as the component is only installed if affirmative)
+ if params.stack_supports_zk_security is False:
+ Logger.info("Skipping setting up secure ZNode ACL for HDFS as it's supported only for HDP 2.6 and above.")
+ return
+
+ # check if the namenode is HA
if params.dfs_ha_enabled is False:
- Logger.info("The namenode is not HA, zkfc security setup skipped.")
+ Logger.info("Skipping setting up secure ZNode ACL for HDFS as it's supported only for NameNode HA mode.")
return
# check if the cluster is secure (skip otherwise)
if params.security_enabled is False:
- Logger.info("The cluster is not secure, zkfc security setup skipped.")
+ Logger.info("Skipping setting up secure ZNode ACL for HDFS as it's supported only for secure clusters.")
return
# process the JAAS template
File(os.path.join(params.hadoop_conf_secure_dir, 'hdfs_jaas.conf'),
- owner='root',
- group='root',
+ owner=params.hdfs_user,
+ group=params.user_group,
mode=0644,
content=Template("hdfs_jaas.conf.j2")
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/d6d24798/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
index bfc9429..c2ff457 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
@@ -62,11 +62,6 @@ class ZkfcSlave(Script):
import params
env.set_params(params)
hdfs("zkfc_slave")
-
- # set up failover / zookeper ACLs, this feature is supported from HDP 2.6 ownwards
- if params.stack_supports_zk_security:
- utils.set_up_zkfc_security(params)
-
pass
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
http://git-wip-us.apache.org/repos/asf/ambari/blob/d6d24798/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
index 7fae57f..a0ed658 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
@@ -34,6 +34,7 @@ from resource_management.libraries.functions import Direction
from ambari_commons import OSCheck, OSConst
from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
from utils import get_dfsadmin_base_command
+from utils import set_up_zkfc_security
if OSCheck.is_windows_family():
from resource_management.libraries.functions.windows_service_utils import check_windows_service_status
@@ -95,6 +96,9 @@ def namenode(action=None, hdfs_binary=None, do_format=True, upgrade_type=None,
#we need this directory to be present before any action(HA manual steps for
#additional namenode)
create_name_dirs(params.dfs_name_dir)
+
+ # set up failover / secure zookeeper ACLs; this feature is supported from HDP 2.6 onwards
+ set_up_zkfc_security(params)
elif action == "start":
Logger.info("Called service {0} with upgrade_type: {1}".format(action, str(upgrade_type)))
setup_ranger_hdfs(upgrade_type=upgrade_type)
http://git-wip-us.apache.org/repos/asf/ambari/blob/d6d24798/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/utils.py
index 9eebe63..48f5a1f 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/utils.py
@@ -386,23 +386,27 @@ def get_dfsadmin_base_command(hdfs_binary, use_specific_namenode = False):
def set_up_zkfc_security(params):
- """ Sets up security for accessing zookeper on secure clusters """
-
- # check if the namenode is HA (this may be redundant as the component is only installed if affirmative)
- if params.dfs_ha_enabled is False:
- Logger.info("The namenode is not HA, zkfc security setup skipped.")
- return
-
- # check if the cluster is secure (skip otherwise)
- if params.security_enabled is False:
- Logger.info("The cluster is not secure, zkfc security setup skipped.")
- return
-
- # process the JAAS template
- File(os.path.join(params.hadoop_conf_secure_dir, 'hdfs_jaas.conf'),
- owner='root',
- group='root',
- mode=0644,
- content=Template("hdfs_jaas.conf.j2")
- )
+ """ Sets up security for accessing zookeeper on secure clusters """
+
+ if params.stack_supports_zk_security is False:
+ Logger.info("Skipping setting up secure ZNode ACL for HDFS as it's supported only for HDP 2.6 and above.")
+ return
+
+ # check if the namenode is HA
+ if params.dfs_ha_enabled is False:
+ Logger.info("Skipping setting up secure ZNode ACL for HDFS as it's supported only for NameNode HA mode.")
+ return
+
+ # check if the cluster is secure (skip otherwise)
+ if params.security_enabled is False:
+ Logger.info("Skipping setting up secure ZNode ACL for HDFS as it's supported only for secure clusters.")
+ return
+
+ # process the JAAS template
+ File(os.path.join(params.hadoop_conf_secure_dir, 'hdfs_jaas.conf'),
+ owner=params.hdfs_user,
+ group=params.user_group,
+ mode=0644,
+ content=Template("hdfs_jaas.conf.j2")
+ )
http://git-wip-us.apache.org/repos/asf/ambari/blob/d6d24798/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
index f2ea6ad..6a0d71f 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
@@ -36,9 +36,7 @@ from resource_management.libraries.functions.security_commons import get_params_
from resource_management.libraries.functions.security_commons import validate_security_config_properties
from resource_management.libraries.functions.security_commons import FILE_TYPE_XML
from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions.version import compare_versions
from resource_management.libraries.script import Script
-from resource_management.libraries.functions.version_select_util import get_component_version
from resource_management.core.resources.zkmigrator import ZkMigrator
class ZkfcSlave(Script):
@@ -62,11 +60,6 @@ class ZkfcSlave(Script):
import params
env.set_params(params)
hdfs("zkfc_slave")
-
- # set up failover / zookeper ACLs, this feature is supported from HDP 2.6 ownwards
- if params.stack_supports_zk_security:
- utils.set_up_zkfc_security(params)
-
pass
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
http://git-wip-us.apache.org/repos/asf/ambari/blob/d6d24798/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 8e0e783..a748b33 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -182,7 +182,6 @@ oozie_servers = default("/clusterHostInfo/oozie_server", [])
falcon_server_hosts = default("/clusterHostInfo/falcon_server_hosts", [])
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
zeppelin_master_hosts = default("/clusterHostInfo/zeppelin_master_hosts", [])
-zkfc_hosts = default("/clusterHostInfo/zkfc_hosts", [])
# get the correct version to use for checking stack features
version_for_stack_feature_checks = get_stack_feature_version(config)
@@ -196,9 +195,20 @@ has_oozie_server = not len(oozie_servers) == 0
has_falcon_server_hosts = not len(falcon_server_hosts) == 0
has_ranger_admin = not len(ranger_admin_hosts) == 0
has_zeppelin_master = not len(zeppelin_master_hosts) == 0
-has_zkfc_hosts = not len(zkfc_hosts)== 0
stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
+# HDFS High Availability properties
+dfs_ha_enabled = False
+dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.internal.nameservices', None)
+if dfs_ha_nameservices is None:
+ dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.nameservices', None)
+dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+if dfs_ha_namenode_ids:
+ dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
+ dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
+ if dfs_ha_namenode_ids_array_len > 1:
+ dfs_ha_enabled = True
+
if has_namenode or dfs_type == 'HCFS':
hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
@@ -246,5 +256,5 @@ tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"]
override_uid = str(default("/configurations/cluster-env/override_uid", "true")).lower()
# if NN HA on secure clutser, access Zookeper securely
-if stack_supports_zk_security and has_zkfc_hosts and security_enabled:
+if stack_supports_zk_security and dfs_ha_enabled and security_enabled:
hadoop_zkfc_opts=format("-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client")
http://git-wip-us.apache.org/repos/asf/ambari/blob/d6d24798/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
index 8e5d210..9be9101 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
@@ -178,7 +178,6 @@ oozie_servers = default("/clusterHostInfo/oozie_server", [])
falcon_server_hosts = default("/clusterHostInfo/falcon_server_hosts", [])
ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
zeppelin_master_hosts = default("/clusterHostInfo/zeppelin_master_hosts", [])
-zkfc_hosts = default("/clusterHostInfo/zkfc_hosts", [])
# get the correct version to use for checking stack features
version_for_stack_feature_checks = get_stack_feature_version(config)
@@ -191,9 +190,21 @@ has_oozie_server = not len(oozie_servers) == 0
has_falcon_server_hosts = not len(falcon_server_hosts) == 0
has_ranger_admin = not len(ranger_admin_hosts) == 0
has_zeppelin_master = not len(zeppelin_master_hosts) == 0
-has_zkfc_hosts = not len(zkfc_hosts)== 0
stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
+# HDFS High Availability properties
+dfs_ha_enabled = False
+dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.internal.nameservices', None)
+if dfs_ha_nameservices is None:
+ dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.nameservices', None)
+dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+if dfs_ha_namenode_ids:
+ dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
+ dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
+ if dfs_ha_namenode_ids_array_len > 1:
+ dfs_ha_enabled = True
+
+
if has_namenode or dfs_type == 'HCFS':
hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
@@ -241,5 +252,5 @@ tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"]
override_uid = str(default("/configurations/cluster-env/override_uid", "true")).lower()
# if NN HA on secure clutser, access Zookeper securely
-if stack_supports_zk_security and has_zkfc_hosts and security_enabled:
- hadoop_zkfc_opts=format("-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client")
+if stack_supports_zk_security and dfs_ha_enabled and security_enabled:
+ hadoop_zkfc_opts=format("-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client")
[07/30] ambari git commit: AMBARI-19759. Fix Spark2 service
description string (Bikas Saha via smohanty)
Posted by nc...@apache.org.
AMBARI-19759. Fix Spark2 service description string (Bikas Saha via smohanty)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c6009853
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c6009853
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c6009853
Branch: refs/heads/branch-dev-patch-upgrade
Commit: c60098536dcd7faf85e364fdcafed17cbba340a6
Parents: b3391f0
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Fri Jan 27 16:02:15 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Fri Jan 27 16:02:46 2017 -0800
----------------------------------------------------------------------
.../main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c6009853/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml
index 34d3fdd..66341c9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/SPARK2/metainfo.xml
@@ -23,8 +23,9 @@
<services>
<service>
<name>SPARK2</name>
- <comment>Apache Spark 2.1 is a fast and general engine for large-scale data processing. This service is <b>Technical Preview</b>.</comment>
- <version>2.1.x</version>
+ <comment>Apache Spark is a fast and general engine for large-scale data processing</comment>
+ <version>2.x</version>
+ <selection>DEFAULT</selection>
<components>
<component>
[26/30] ambari git commit: AMBARI-19766. Not able to set the sla for
an action node (Padma Priya N via gauravn7)
Posted by nc...@apache.org.
AMBARI-19766. Not able to set the sla for an action node (Padma Priya N via gauravn7)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/da7202d4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/da7202d4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/da7202d4
Branch: refs/heads/branch-dev-patch-upgrade
Commit: da7202d41cbf9042ac844ad408a08c348ec06e7f
Parents: ff34b05
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Mon Jan 30 17:39:33 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Mon Jan 30 17:39:33 2017 +0530
----------------------------------------------------------------------
.../main/resources/ui/app/components/workflow-action-editor.js | 2 +-
.../main/resources/ui/app/templates/components/sla-info.hbs | 6 +++---
.../resources/ui/app/templates/components/sqoop-action.hbs | 2 +-
.../resources/ui/app/templates/components/workflow-actions.hbs | 2 +-
4 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/da7202d4/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
index f2d3ba8..43eeb5b 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/workflow-action-editor.js
@@ -132,7 +132,7 @@ export default Ember.Component.extend( Ember.Evented,{
}
if(!CommonUtils.isSupportedAction(this.get('actionType')) && !this.get('actionModel.slaInfo')){
this.set('customSlaInfo', SlaInfo.create({}));
- }else{
+ }else if(!CommonUtils.isSupportedAction(this.get('actionType')) && this.get('actionModel.slaInfo')){
this.set('customSlaInfo', this.get('actionModel.slaInfo'));
this.set('customSlaEnabled', this.get('actionModel.slaEnabled'));
delete this.get('actionModel').slaInfo;
http://git-wip-us.apache.org/repos/asf/ambari/blob/da7202d4/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs
index 447bac2..2ebc259 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sla-info.hbs
@@ -35,7 +35,7 @@
<div class="form-group">
<label class="control-label col-xs-2">Should Start</label>
<div class="col-xs-3">
- {{input class="form-control" type="text" value=slaInfo.shouldStart.time placeholder="Should Start"}}
+ {{input class="form-control" type="text" value=slaInfo.shouldStart.time placeholder="Time period(eg. 5)" title="Time period in minutes/hours/days"}}
{{field-error model=this field='slaInfo.shouldStart.time' showErrorMessage=showErrorMessage}}
</div>
<div class="col-xs-2">
@@ -50,7 +50,7 @@
<div class="form-group">
<label class="control-label col-xs-2">Should End<span class="requiredField"> *</span></label>
<div class="col-xs-3">
- {{input class="form-control" type="text" value=slaInfo.shouldEnd.time placeholder="Should End"}}
+ {{input class="form-control" type="text" value=slaInfo.shouldEnd.time placeholder="Time period(eg. 5)" title="Time period in minutes/hours/days"}}
{{field-error model=this field='slaInfo.shouldEnd.time' showErrorMessage=showErrorMessage}}
</div>
<div class="col-xs-2">
@@ -65,7 +65,7 @@
<div class="form-group">
<label class="control-label col-xs-2">Max Duration</label>
<div class="col-xs-3">
- {{input class="form-control" type="text" value=slaInfo.maxDuration.time placeholder="Max Duration"}}
+ {{input class="form-control" type="text" value=slaInfo.maxDuration.time placeholder="Time period(eg. 5)" title="Time period in minutes/hours/days"}}
</div>
<div class="col-xs-2">
<select class="form-control" name="time-unit" title="Time Unit" onchange={{action (mut slaInfo.maxDuration.unit) value="target.value"}}>
http://git-wip-us.apache.org/repos/asf/ambari/blob/da7202d4/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs
index 3f0ec61..fb59149 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/sqoop-action.hbs
@@ -32,7 +32,7 @@
<div class="form-group padding10">
<label for="inputEmail" class="control-label col-xs-2">Command</label>
<div class="col-xs-7">
- {{input type="text" class="form-control" name="command" value=actionModel.command placeholder="Sqoop Command"}}
+ {{input type="text" class="form-control" name="command" value=actionModel.command placeholder="Command" title="eg. import --connect jdbc:hsqldb:file:db.hsqldb --table TT --target-dir /user/${wf:user()}/output-data/sqoop -m 1"}}
</div>
</div>
{{/if}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/da7202d4/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs
index badf320..265da9f 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/workflow-actions.hbs
@@ -49,7 +49,7 @@
<li {{action 'addAction' 'java'}} class="dr_action enabled" data-name="Java" data-type="java"> <i class="fa fa-code"></i> Java </li>
<li {{action 'addAction' 'shell'}} class="dr_action enabled" data-name="Shell" data-type="shell"> <i class="fa fa-terminal"></i> Shell </li>
<li {{action 'addAction' 'distcp'}} class="dr_action enabled" data-name="distcp" data-type="distcp"> <i class="fa fa-clone"></i> distcp </li>
- <li {{action 'addAction' 'map-reduce'}} class="dr_action enabled" data-name="MR" data-type="mapreduce"> <i class="fa fa-cubes"></i> MR </li>
+ <li {{action 'addAction' 'map-reduce'}} class="dr_action enabled" data-name="MR" data-type="map-reduce"> <i class="fa fa-cubes"></i> MR </li>
<li {{action 'addAction' 'ssh'}} class="dr_action enabled" data-name="SSH" data-type="ssh"> <i class="fa fa-terminal"></i> ssh </li>
<li {{action 'addAction' 'spark'}} class="dr_action enabled" data-name="Spark" data-type="spark"> <i class="fa fa-star"></i> Spark </li>
<li class="dr_action disabled hide" data-name="Stream" data-type="stream"> <i class="fa fa-exchange"></i> Stream </li>
[27/30] ambari git commit: AMBARI-19709:XML generated for bundle's is
not as per oozie xsd (Padma Priya N via gauravn7)
Posted by nc...@apache.org.
AMBARI-19709:XML generated for bundle's is not as per oozie xsd (Padma Priya N via gauravn7)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/af6ba590
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/af6ba590
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/af6ba590
Branch: refs/heads/branch-dev-patch-upgrade
Commit: af6ba590d20b75e6a276a9e0a81f416fefc8cfd9
Parents: da7202d
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Mon Jan 30 17:42:14 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Mon Jan 30 17:42:54 2017 +0530
----------------------------------------------------------------------
.../src/main/resources/ui/app/components/bundle-config.js | 2 +-
.../resources/ui/app/domain/bundle/bundle-xml-generator.js | 4 ++--
.../main/resources/ui/app/domain/bundle/bundle-xml-importer.js | 6 ++----
3 files changed, 5 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/af6ba590/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
index 8b4c3d8..fe6dfd1 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/bundle-config.js
@@ -160,7 +160,7 @@ export default Ember.Component.extend(Ember.Evented, Validations, {
throw new Error(e);
}.bind(this));
},
- getBundleFromJSON(filePath){
+ getBundleFromJSON(draftBundle){
this.set('bundle', JSON.parse(draftBundle));
},
getBundleFromHdfs(filePath){
http://git-wip-us.apache.org/repos/asf/ambari/blob/af6ba590/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-generator.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-generator.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-generator.js
index 7dc05a9..f2dcef9 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-generator.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-generator.js
@@ -25,8 +25,8 @@ var BundleGenerator= Ember.Object.extend({
bundleApp._xmlns = "uri:oozie:bundle:"+this.bundle.schemaVersions.bundleVersion;
bundleApp._name = this.bundle.name;
if(!Ember.isEmpty(this.bundle.kickOffTime.value)){
- bundleApp["control"] = {};
- bundleApp["control"]["kick-off-time"] = this.bundle.kickOffTime.value;
+ bundleApp["controls"] = {};
+ bundleApp["controls"]["kick-off-time"] = this.bundle.kickOffTime.value;
}
this.generateCoordinatorsJson(bundleApp);
var xmlAsStr = this.get("x2js").json2xml_str(xmlJson);
http://git-wip-us.apache.org/repos/asf/ambari/blob/af6ba590/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-importer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-importer.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-importer.js
index a2053c9..b3eed4b 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-importer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/bundle/bundle-xml-importer.js
@@ -49,10 +49,8 @@ var BundleXmlImporter= Ember.Object.extend({
} else {
bundle.schemaVersions.bundleVersion = bundleVersion;
}
- if(bundleApp.control && bundleApp.control["kick-off-time"]) {
- bundle.kickOffTime = this.extractDateField(bundleApp["control"]["kick-off-time"]);
- }else{
-
+ if(bundleApp.controls && bundleApp.controls["kick-off-time"]) {
+ bundle.kickOffTime = this.extractDateField(bundleApp["controls"]["kick-off-time"]);
}
this.processCoordinatorsJson(bundleApp, bundle);
return {bundle: bundle, errors: errors};
[14/30] ambari git commit: AMBARI-19727. Extend security.json of
Infra Solr as a configuration property in Ambari (oleewere)
Posted by nc...@apache.org.
AMBARI-19727. Extend security.json of Infra Solr as a configuration property in Ambari (oleewere)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e0765d92
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e0765d92
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e0765d92
Branch: refs/heads/branch-dev-patch-upgrade
Commit: e0765d9228aaa9ff3fba47249de67c85dd6eb945
Parents: 015d404
Author: oleewere <ol...@gmail.com>
Authored: Sun Jan 29 16:35:56 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Sun Jan 29 16:43:27 2017 +0100
----------------------------------------------------------------------
.../libraries/functions/solr_cloud_util.py | 6 ++--
.../logsearch/solr/AmbariSolrCloudCLI.java | 13 ++++++-
.../logsearch/solr/AmbariSolrCloudClient.java | 6 ++++
.../solr/AmbariSolrCloudClientBuilder.java | 6 ++++
.../EnableKerberosPluginSolrZkCommand.java | 25 ++++++++++----
.../configuration/infra-solr-security-json.xml | 36 ++++++++++++++++++++
.../AMBARI_INFRA/0.1.0/metainfo.xml | 1 +
.../0.1.0/package/scripts/params.py | 2 ++
.../0.1.0/package/scripts/setup_infra_solr.py | 12 ++++++-
.../properties/infra-solr-security.json.j2 | 22 ++++++++++++
.../stacks/2.4/AMBARI_INFRA/test_infra_solr.py | 7 ++++
.../test/python/stacks/2.4/configs/default.json | 3 ++
12 files changed, 128 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py b/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
index 3e076b8..4628211 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
@@ -143,14 +143,14 @@ def create_znode(zookeeper_quorum, solr_znode, java64_home, retry = 5 , interval
create_znode_cmd = format('{solr_cli_prefix} --create-znode --retry {retry} --interval {interval}')
Execute(create_znode_cmd)
-def setup_kerberos_plugin(zookeeper_quorum, solr_znode, java64_home, secure=False, jaas_file = None):
+def setup_kerberos_plugin(zookeeper_quorum, solr_znode, java64_home, secure=False, security_json_location = None, jaas_file = None):
"""
Set Kerberos plugin on the Solr znode in security.json, if secure is False, then clear the security.json
"""
solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
setup_kerberos_plugin_cmd = format('{solr_cli_prefix} --setup-kerberos-plugin')
- if secure and jaas_file is not None:
- setup_kerberos_plugin_cmd+=format(' --jaas-file {jaas_file} --secure')
+ if secure and jaas_file is not None and security_json_location is not None:
+ setup_kerberos_plugin_cmd+=format(' --jaas-file {jaas_file} --secure --security-json-location {security_json_location}')
Execute(setup_kerberos_plugin_cmd)
def set_cluster_prop(zookeeper_quorum, solr_znode, prop_name, prop_value, java64_home, jaas_file = None):
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java
index 94e43e4..be73473 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java
@@ -50,6 +50,7 @@ public class AmbariSolrCloudCLI {
private static final String CHECK_ZNODE = "check-znode";
private static final String SECURE_ZNODE_COMMAND = "secure-znode";
private static final String SECURE_SOLR_ZNODE_COMMAND = "secure-solr-znode";
+ private static final String SECURITY_JSON_LOCATION = "security-json-location";
private static final String CMD_LINE_SYNTAX =
"\n./solrCloudCli.sh --create-collection -z host1:2181,host2:2181/ambari-solr -c collection -cs conf_set"
+ "\n./solrCloudCli.sh --upload-config -z host1:2181,host2:2181/ambari-solr -d /tmp/myconfig_dir -cs config_set"
@@ -61,7 +62,7 @@ public class AmbariSolrCloudCLI {
+ "\n./solrCloudCli.sh --cluster-prop -z host1:2181,host2:2181/ambari-solr -cpn urlScheme -cpn http"
+ "\n./solrCloudCli.sh --secure-znode -z host1:2181,host2:2181 -zn /ambari-solr -su logsearch,atlas,ranger --jaas-file /etc/myconf/jaas_file"
+ "\n./solrCloudCli.sh --secure-solr-znode -z host1:2181,host2:2181 -zn /ambari-solr -su logsearch,atlas,ranger --jaas-file /etc/myconf/jaas_file"
- + "\n./solrCloudCli.sh --setup-kerberos-plugin -z host1:2181,host2:2181 -zn /ambari-solr\n";
+ + "\n./solrCloudCli.sh --setup-kerberos-plugin -z host1:2181,host2:2181 -zn /ambari-solr --security-json-location /etc/infra-solr/conf/security.json\n";
public static void main(String[] args) {
Options options = new Options();
@@ -306,6 +307,13 @@ public class AmbariSolrCloudCLI {
.argName("atlas,ranger,logsearch-solr")
.build();
+ final Option securityJsonLocationOption = Option.builder("sjl")
+ .longOpt(SECURITY_JSON_LOCATION)
+ .desc("Local security.json path")
+ .numberOfArgs(1)
+ .argName("security.json location")
+ .build();
+
final Option secureOption = Option.builder("sec")
.longOpt("secure")
.desc("Flag for enable/disable kerberos (with --setup-kerberos or --setup-kerberos-plugin)")
@@ -349,6 +357,7 @@ public class AmbariSolrCloudCLI {
options.addOption(saslUsersOption);
options.addOption(checkZnodeOption);
options.addOption(setupKerberosPluginOption);
+ options.addOption(securityJsonLocationOption);
AmbariSolrCloudClient solrCloudClient = null;
@@ -427,6 +436,7 @@ public class AmbariSolrCloudCLI {
String znode = cli.hasOption("zn") ? cli.getOptionValue("zn") : null;
boolean isSecure = cli.hasOption("sec");
String saslUsers = cli.hasOption("su") ? cli.getOptionValue("su") : "";
+ String securityJsonLocation = cli.hasOption("sjl") ? cli.getOptionValue("sjl") : "";
AmbariSolrCloudClientBuilder clientBuilder = new AmbariSolrCloudClientBuilder()
.withZkConnectString(zkConnectString)
@@ -450,6 +460,7 @@ public class AmbariSolrCloudCLI {
.withTrustStoreType(trustStoreType)
.withClusterPropName(clusterPropName)
.withClusterPropValue(clusterPropValue)
+ .withSecurityJsonLocation(securityJsonLocation)
.withZnode(znode)
.withSecure(isSecure)
.withSaslUsers(saslUsers);
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java
index 6bb5a77..f54401d 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java
@@ -68,6 +68,7 @@ public class AmbariSolrCloudClient {
private final String saslUsers;
private final String propName;
private final String propValue;
+ private final String securityJsonLocation;
private final boolean secure;
public AmbariSolrCloudClient(AmbariSolrCloudClientBuilder builder) {
@@ -90,6 +91,7 @@ public class AmbariSolrCloudClient {
this.saslUsers = builder.saslUsers;
this.propName = builder.propName;
this.propValue = builder.propValue;
+ this.securityJsonLocation = builder.securityJsonLocation;
this.secure = builder.secure;
}
@@ -325,4 +327,8 @@ public class AmbariSolrCloudClient {
public boolean isSecure() {
return secure;
}
+
+ public String getSecurityJsonLocation() {
+ return securityJsonLocation;
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java
index 590485f..7593da6 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java
@@ -51,6 +51,7 @@ public class AmbariSolrCloudClientBuilder {
String saslUsers;
String propName;
String propValue;
+ String securityJsonLocation;
boolean secure;
public AmbariSolrCloudClient build() {
@@ -195,6 +196,11 @@ public class AmbariSolrCloudClientBuilder {
return this;
}
+ public AmbariSolrCloudClientBuilder withSecurityJsonLocation(String securityJson) {
+ this.securityJsonLocation = securityJson;
+ return this;
+ }
+
public AmbariSolrCloudClientBuilder withSecure(boolean isSecure) {
this.secure = isSecure;
return this;
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/EnableKerberosPluginSolrZkCommand.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/EnableKerberosPluginSolrZkCommand.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/EnableKerberosPluginSolrZkCommand.java
index fe97994..3807887 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/EnableKerberosPluginSolrZkCommand.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/EnableKerberosPluginSolrZkCommand.java
@@ -19,16 +19,19 @@
package org.apache.ambari.logsearch.solr.commands;
import org.apache.ambari.logsearch.solr.AmbariSolrCloudClient;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.SolrZooKeeper;
import org.apache.zookeeper.CreateMode;
+import java.io.File;
+import java.io.IOException;
import java.nio.charset.StandardCharsets;
public class EnableKerberosPluginSolrZkCommand extends AbstractZookeeperRetryCommand<String> {
private static final String SECURITY_JSON = "/security.json";
- private static final String SECURE_CONTENT = "{\"authentication\":{\"class\": \"org.apache.solr.security.KerberosPlugin\"}}";
private static final String UNSECURE_CONTENT = "{}";
public EnableKerberosPluginSolrZkCommand(int maxRetries, int interval) {
@@ -39,12 +42,13 @@ public class EnableKerberosPluginSolrZkCommand extends AbstractZookeeperRetryCom
protected String executeZkCommand(AmbariSolrCloudClient client, SolrZkClient zkClient, SolrZooKeeper solrZooKeeper) throws Exception {
String result = "";
String filePath = client.getZnode() + SECURITY_JSON;
- String fileContent = getFileContent(zkClient, filePath);
+ String fileContent = getFileContentFromZnode(zkClient, filePath);
+ String securityContent = getFileContent(client.getSecurityJsonLocation());
if (client.isSecure()) {
- if (!fileContent.equals(SECURE_CONTENT)) {
- putFileContent(zkClient, filePath, SECURE_CONTENT);
+ if (!fileContent.equals(securityContent)) {
+ putFileContent(zkClient, filePath, securityContent);
}
- result = SECURE_CONTENT;
+ result = securityContent;
} else {
if (!fileContent.equals(UNSECURE_CONTENT)) {
putFileContent(zkClient, filePath, UNSECURE_CONTENT);
@@ -62,7 +66,7 @@ public class EnableKerberosPluginSolrZkCommand extends AbstractZookeeperRetryCom
}
}
- private String getFileContent(SolrZkClient zkClient, String fileName) throws Exception {
+ private String getFileContentFromZnode(SolrZkClient zkClient, String fileName) throws Exception {
String result;
if (zkClient.exists(fileName, true)) {
byte[] data = zkClient.getData(fileName, null, null, true);
@@ -72,4 +76,13 @@ public class EnableKerberosPluginSolrZkCommand extends AbstractZookeeperRetryCom
}
return result;
}
+
+ private String getFileContent(String fileLocation) throws IOException {
+ File securityJson = new File(fileLocation);
+ if (StringUtils.isNotEmpty(fileLocation) && securityJson.exists()) {
+ return FileUtils.readFileToString(securityJson);
+ } else {
+ return UNSECURE_CONTENT;
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml
new file mode 100644
index 0000000..ed623df
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-solr-security-json.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<!-- This is a special config file for properties used to monitor status of the service -->
+<configuration supports_adding_forbidden="true">
+ <property>
+ <name>content</name>
+ <display-name>infra-solr security.json template</display-name>
+ <description>This is the jinja template for security.json file on the solr znode (only used if the cluster is secure)</description>
+ <value/>
+ <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+ <value-attributes>
+ <property-file-name>infra-solr-security.json.j2</property-file-name>
+ <property-file-type>text</property-file-type>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
+</configuration>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/metainfo.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/metainfo.xml
index b94812b..1c5bf6e 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/metainfo.xml
@@ -64,6 +64,7 @@
<config-type>infra-solr-env</config-type>
<config-type>infra-solr-xml</config-type>
<config-type>infra-solr-log4j</config-type>
+ <config-type>infra-solr-security-json</config-type>
</configuration-dependencies>
</component>
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
index c2b8a44..526baea 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/params.py
@@ -120,6 +120,8 @@ if security_enabled:
infra_solr_kerberos_name_rules = config['configurations']['infra-solr-env']['infra_solr_kerberos_name_rules'].replace('$', '\$')
infra_solr_sasl_user = get_name_from_principal(infra_solr_kerberos_principal)
+infra_solr_security_json_content = config['configurations']['infra-solr-security-json']['content']
+
#Solr log4j
infra_log_maxfilesize = default('configurations/infra-solr-log4j/infra_log_maxfilesize',10)
infra_log_maxbackupindex = default('configurations/infra-solr-log4j/infra_log_maxbackupindex',9)
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
index 46535cd..8d72f42 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
@@ -72,6 +72,15 @@ def setup_infra_solr(name = None):
group=params.user_group
)
+ security_json_file_location = format("{infra_solr_conf}/security.json")
+
+ File(security_json_file_location,
+ content=InlineTemplate(params.infra_solr_security_json_content),
+ owner=params.infra_solr_user,
+ group=params.user_group,
+ mode=0644
+ )
+
jaas_file = params.infra_solr_jaas_file if params.security_enabled else None
url_scheme = 'https' if params.infra_solr_ssl_enabled else 'http'
@@ -96,7 +105,8 @@ def setup_infra_solr(name = None):
solr_znode=params.infra_solr_znode,
jaas_file=jaas_file,
java64_home=params.java64_home,
- secure=params.security_enabled
+ secure=params.security_enabled,
+ security_json_location=security_json_file_location
)
if params.security_enabled:
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-security.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-security.json.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-security.json.j2
new file mode 100644
index 0000000..d8aea24
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/properties/infra-solr-security.json.j2
@@ -0,0 +1,22 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+{
+ "authentication": {
+ "class": "org.apache.solr.security.KerberosPlugin"
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py b/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
index 6af463c..cd88fec 100644
--- a/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
+++ b/ambari-server/src/test/python/stacks/2.4/AMBARI_INFRA/test_infra_solr.py
@@ -95,6 +95,13 @@ class TestInfraSolr(RMFTestCase):
content = InlineTemplate(self.getConfig()['configurations']['infra-solr-log4j']['content'])
)
+ self.assertResourceCalled('File', '/etc/ambari-infra-solr/conf/security.json',
+ owner = 'solr',
+ group='hadoop',
+ content = InlineTemplate(self.getConfig()['configurations']['infra-solr-security-json']['content']),
+ mode = 0644
+ )
+
self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --create-znode --retry 30 --interval 5')
self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181/infra-solr --cluster-prop --property-name urlScheme --property-value http')
self.assertResourceCalled('Execute', 'ambari-sudo.sh JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /usr/lib/ambari-infra-solr-client/solrCloudCli.sh --zookeeper-connect-string c6401.ambari.apache.org:2181 --znode /infra-solr --setup-kerberos-plugin')
http://git-wip-us.apache.org/repos/asf/ambari/blob/e0765d92/ambari-server/src/test/python/stacks/2.4/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/configs/default.json b/ambari-server/src/test/python/stacks/2.4/configs/default.json
index dd8a096..7a940d3 100644
--- a/ambari-server/src/test/python/stacks/2.4/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.4/configs/default.json
@@ -239,6 +239,9 @@
"logsearch_admin_password" : "admin",
"content": "admin json content"
},
+ "infra-solr-security-json" : {
+ "content" : "{\"authentication\": \"org.apache.solr.security.KerberosPlugin\"}"
+ },
"infra-solr-client-log4j" : {
"infra_solr_client_log_dir" : "/var/log/ambari-infra-solr-client",
"content" : "content"
[17/30] ambari git commit: AMBARI-19756. Add common log rotation
settings to hive log4j2 (Madhuvanthi Radhakrishnan via smohanty)
Posted by nc...@apache.org.
AMBARI-19756. Add common log rotation settings to hive log4j2 (Madhuvanthi Radhakrishnan via smohanty)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c977113c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c977113c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c977113c
Branch: refs/heads/branch-dev-patch-upgrade
Commit: c977113ccb18218befbb86479353f60bda4eb1d7
Parents: 1eb4ca5
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Sun Jan 29 22:48:19 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sun Jan 29 22:48:19 2017 -0800
----------------------------------------------------------------------
.../package/scripts/hive_interactive.py | 4 +--
.../0.12.0.2.0/package/scripts/params_linux.py | 8 ++++++
.../stacks/HDP/2.3/upgrades/config-upgrade.xml | 20 ++++++++++++++
.../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml | 12 +++++++++
.../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 12 +++++++++
.../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml | 12 +++++++++
.../stacks/HDP/2.3/upgrades/upgrade-2.4.xml | 11 ++++++++
.../stacks/HDP/2.3/upgrades/upgrade-2.5.xml | 11 ++++++++
.../stacks/HDP/2.3/upgrades/upgrade-2.6.xml | 11 ++++++++
.../stacks/HDP/2.4/upgrades/config-upgrade.xml | 20 ++++++++++++++
.../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 14 ++++++++++
.../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml | 12 +++++++++
.../stacks/HDP/2.4/upgrades/upgrade-2.5.xml | 11 ++++++++
.../stacks/HDP/2.4/upgrades/upgrade-2.6.xml | 11 ++++++++
.../services/HIVE/configuration/hive-log4j2.xml | 25 ++++++++++++++++-
.../HIVE/configuration/llap-cli-log4j2.xml | 25 ++++++++++++++++-
.../stacks/HDP/2.5/upgrades/config-upgrade.xml | 28 ++++++++++++++++++++
.../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml | 14 +++++++++-
.../stacks/HDP/2.5/upgrades/upgrade-2.6.xml | 6 +++++
.../stacks/2.5/HIVE/test_hive_server_int.py | 8 +++---
20 files changed, 266 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
index 335a850..760752f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
@@ -234,14 +234,14 @@ def hive_interactive(name=None):
mode=mode_identified,
group=params.user_group,
owner=params.hive_user,
- content=params.llap_cli_log4j2)
+ content=InlineTemplate(params.llap_cli_log4j2))
hive_log4j2_filename = 'hive-log4j2.properties'
File(format("{hive_server_interactive_conf_dir}/{hive_log4j2_filename}"),
mode=mode_identified,
group=params.user_group,
owner=params.hive_user,
- content=params.hive_log4j2)
+ content=InlineTemplate(params.hive_log4j2))
hive_exec_log4j2_filename = 'hive-exec-log4j2.properties'
File(format("{hive_server_interactive_conf_dir}/{hive_exec_log4j2_filename}"),
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 4907f93..adac228 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -603,6 +603,14 @@ has_hive_interactive = len(hive_interactive_hosts) > 0
hive_llap_log_maxfilesize = default('/configurations/llap-daemon-log4j/hive_llap_log_maxfilesize', 256)
hive_llap_log_maxbackupindex = default('/configurations/llap-daemon-log4j/hive_llap_log_maxbackupindex', 240)
+#hive log4j2 properties
+hive2_log_maxfilesize = default('/configurations/hive-log4j2/hive2_log_maxfilesize', 256)
+hive2_log_maxbackupindex = default('/configurations/hive-log4j2/hive2_log_maxbackupindex', 30)
+
+#llap cli log4j2 properties
+llap_cli_log_maxfilesize = default('/configurations/llap-cli-log4j2/llap_cli_log_maxfilesize', 256)
+llap_cli_log_maxbackupindex = default('/configurations/llap-cli-log4j2/llap_cli_log_maxbackupindex', 30)
+
if has_hive_interactive:
llap_daemon_log4j = config['configurations']['llap-daemon-log4j']['content']
llap_cli_log4j2 = config['configurations']['llap-cli-log4j2']['content']
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index efa6e70..1f3b5ad 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -191,9 +191,29 @@
<regex-replace key="content" find="property.llap.daemon.log.maxfilesize = ([0-9]+)MB" replace-with="property.llap.daemon.log.maxfilesize = {{hive_llap_log_maxfilesize}}MB"/>
<regex-replace key="content" find="property.llap.daemon.log.maxbackupindex = ([0-9]+)" replace-with="property.llap.daemon.log.maxbackupindex = {{hive_llap_log_maxbackupindex}}"/>
</definition>
+ </changes>
+ </component>
+ <component name="HIVE_SERVER_INTERACTIVE">
+ <changes>
+ <definition xsi:type="configure" id="hive_log4j2_parameterize" summary="Parameterizing Hive Log4J2 Properties">
+ <type>hive-log4j2</type>
+ <set key="hive2_log_maxfilesize" value="256"/>
+ <set key = "hive2_log_maxbackupindex" value="30"/>
+ <regex-replace key="content" find="appender.DRFA.strategy.max = ([0-9]+)" replace-with="appender.DRFA.strategy.max = {{hive2_log_maxbackupindex}}"/>
+ <replace key="content" find="appender.DRFA.strategy.type = DefaultRolloverStrategy" replace-with="appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.fsize.size = {{hive2_log_maxfilesize}}MB"/>
+ </definition>
+
+ <definition xsi:type="configure" id="llap_cli_log4j2_parameterize" summary="Parameterizing LLAP Cli Log4J2 Properties">
+ <type>llap-cli-log4j2</type>
+ <set key="llap_cli_log_maxfilesize" value="256"/>
+ <set key = "llap_cli_log_maxbackupindex" value="30"/>
+ <regex-replace key="content" find="appender.DRFA.strategy.max = ([0-9]+)" replace-with="appender.DRFA.strategy.max = {{llap_cli_log_maxbackupindex}}"/>
+ <replace key="content" find="appender.DRFA.strategy.type = DefaultRolloverStrategy" replace-with="appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.fsize.size = {{llap_cli_log_maxfilesize}}MB"/>
+ </definition>
</changes>
</component>
+
<component name="WEBHCAT_SERVER">
<changes>
<definition xsi:type="configure" id="hdp_2_4_0_0_webhcat_server_update_configuration_paths" summary="Updating Configuration Paths">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
index 619fa5d..9e13036 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
@@ -338,6 +338,18 @@
<task xsi:type="configure" id="hdp_2_4_0_0_hive_server_configure_authentication"/>
</execute-stage>
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing LLAP Cli Log4J2 Properties">
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize">
+ <summary>Updating the LLAP Cli Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing Hive Log4J2 Properties">
+ <task xsi:type="configure" id="hive_log4j2_parameterize">
+ <summary>Updating the Hive Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
<!--OOZIE-->
<execute-stage service="OOZIE" component="OOZIE_SERVER" title="Apply config changes for Oozie Server">
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.OozieConfigCalculation">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
index b6d98aa..a3407e0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
@@ -374,6 +374,18 @@
</task>
</execute-stage>
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing LLAP Cli Log4J2 Properties">
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize">
+ <summary>Updating the LLAP Cli Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing Hive Log4J2 Properties">
+ <task xsi:type="configure" id="hive_log4j2_parameterize">
+ <summary>Updating the Hive Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
<execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
<task xsi:type="configure" id="webhcat_log4j_parameterize">
<summary>Updating the Webhcat Log4J properties to include parameterizations</summary>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
index 03fd683..a9b1480 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml
@@ -400,6 +400,18 @@
</task>
</execute-stage>
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing LLAP Cli Log4J2 Properties">
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize">
+ <summary>Updating the LLAP Cli Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing Hive Log4J2 Properties">
+ <task xsi:type="configure" id="hive_log4j2_parameterize">
+ <summary>Updating the Hive Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
<!--OOZIE-->
<execute-stage service="OOZIE" component="OOZIE_SERVER" title="Apply config changes for Oozie Server">
<task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.OozieConfigCalculation">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index 743dd1b..57f7984 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -789,6 +789,17 @@
<task xsi:type="restart-task" />
</upgrade>
</component>
+
+ <component name="HIVE_SERVER_INTERACTIVE">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hive_log4j2_parameterize"/>
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize"/>
+ </pre-upgrade>
+ <pre-downgrade/>
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ </component>
</service>
<service name="SLIDER">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index 8db7669..6d45362 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -916,6 +916,17 @@
<task xsi:type="restart-task" />
</upgrade>
</component>
+
+ <component name="HIVE_SERVER_INTERACTIVE">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hive_log4j2_parameterize"/>
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize"/>
+ </pre-upgrade>
+ <pre-downgrade/>
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ </component>
</service>
<service name="SLIDER">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
index 981947f..a65b32c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.6.xml
@@ -917,6 +917,17 @@
<task xsi:type="restart-task" />
</upgrade>
</component>
+
+ <component name="HIVE_SERVER_INTERACTIVE">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hive_log4j2_parameterize"/>
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize"/>
+ </pre-upgrade>
+ <pre-downgrade/>
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ </component>
</service>
<service name="SLIDER">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index 0a6ea5e..7179099 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -92,6 +92,26 @@
<regex-replace key="content" find="property.llap.daemon.log.maxfilesize = ([0-9]+)MB" replace-with="property.llap.daemon.log.maxfilesize = {{hive_llap_log_maxfilesize}}MB"/>
<regex-replace key="content" find="property.llap.daemon.log.maxbackupindex = ([0-9]+)" replace-with="property.llap.daemon.log.maxbackupindex = {{hive_llap_log_maxbackupindex}}"/>
</definition>
+ </changes>
+ </component>
+
+ <component name="HIVE_SERVER_INTERACTIVE">
+ <changes>
+ <definition xsi:type="configure" id="hive_log4j2_parameterize" summary="Parameterizing Hive Log4J2 Properties">
+ <type>hive-log4j2</type>
+ <set key="hive2_log_maxfilesize" value="256"/>
+ <set key = "hive2_log_maxbackupindex" value="30"/>
+ <regex-replace key="content" find="appender.DRFA.strategy.max = ([0-9]+)" replace-with="appender.DRFA.strategy.max = {{hive2_log_maxbackupindex}}"/>
+ <replace key="content" find="appender.DRFA.strategy.type = DefaultRolloverStrategy" replace-with="appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.fsize.size = {{hive2_log_maxfilesize}}MB"/>
+ </definition>
+
+ <definition xsi:type="configure" id="llap_cli_log4j2_parameterize" summary="Parameterizing LLAP Cli Log4J2 Properties">
+ <type>llap-cli-log4j2</type>
+ <set key="llap_cli_log_maxfilesize" value="256"/>
+ <set key = "llap_cli_log_maxbackupindex" value="30"/>
+ <regex-replace key="content" find="appender.DRFA.strategy.max = ([0-9]+)" replace-with="appender.DRFA.strategy.max = {{llap_cli_log_maxbackupindex}}"/>
+ <replace key="content" find="appender.DRFA.strategy.type = DefaultRolloverStrategy" replace-with="appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.fsize.size = {{llap_cli_log_maxfilesize}}MB"/>
+ </definition>
</changes>
</component>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
index d41c4eb..67e9314 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
@@ -428,17 +428,31 @@
<summary>Updating the Hive Log4J properties to include parameterizations</summary>
</task>
</execute-stage>
+
<execute-stage service="HIVE" component="HIVE_SERVER" title="Parameterizing Hive LLAP Log4J Properties">
<task xsi:type="configure" id="hive_llap_log4j_parameterize">
<summary>Updating the Hive llap Log4J properties to include parameterizations</summary>
</task>
</execute-stage>
+
<execute-stage service="HIVE" component="WEBHCAT_SERVER" title="Parameterizing WebHCat Log4J Properties">
<task xsi:type="configure" id="webhcat_log4j_parameterize">
<summary>Updating the Webhcat Log4J properties to include parameterizations</summary>
</task>
</execute-stage>
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing LLAP Cli Log4J2 Properties">
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize">
+ <summary>Updating the LLAP Cli Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing Hive Log4J2 Properties">
+ <task xsi:type="configure" id="hive_log4j2_parameterize">
+ <summary>Updating the Hive Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
<!-- HBASE -->
<execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for Hbase Master">
<task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hbase_audit_db"/>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
index e5c51ad..7b9b062 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml
@@ -440,6 +440,18 @@
</task>
</execute-stage>
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing LLAP Cli Log4J2 Properties">
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize">
+ <summary>Updating the LLAP Cli Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing Hive Log4J2 Properties">
+ <task xsi:type="configure" id="hive_log4j2_parameterize">
+ <summary>Updating the Hive Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
<!-- HBASE -->
<execute-stage service="HBASE" component="HBASE_MASTER" title="Apply config changes for Hbase Master">
<task xsi:type="configure" id="hdp_2_5_0_0_remove_ranger_hbase_audit_db"/>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index d2c6ff5..1ee9ea2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -907,6 +907,17 @@
<task xsi:type="restart-task" />
</upgrade>
</component>
+
+ <component name="HIVE_SERVER_INTERACTIVE">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hive_log4j2_parameterize"/>
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize"/>
+ </pre-upgrade>
+ <pre-downgrade/>
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ </component>
</service>
<service name="SLIDER">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
index 8b463dd..781149c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.6.xml
@@ -913,6 +913,17 @@
<task xsi:type="restart-task" />
</upgrade>
</component>
+
+ <component name="HIVE_SERVER_INTERACTIVE">
+ <pre-upgrade>
+ <task xsi:type="configure" id="hive_log4j2_parameterize"/>
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize"/>
+ </pre-upgrade>
+ <pre-downgrade/>
+ <upgrade>
+ <task xsi:type="restart-task" />
+ </upgrade>
+ </component>
</service>
<service name="SLIDER">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
index abf307a..01d556f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/hive-log4j2.xml
@@ -21,6 +21,27 @@
-->
<configuration supports_final="false" supports_adding_forbidden="false">
<property>
+ <name>hive2_log_maxfilesize</name>
+ <value>256</value>
+ <description>The maximum size of backup file before the log is rotated</description>
+ <display-name>Hive Log2: backup file size</display-name>
+ <value-attributes>
+ <unit>MB</unit>
+ </value-attributes>
+ <on-ambari-upgrade add="false"/>
+ </property>
+ <property>
+ <name>hive2_log_maxbackupindex</name>
+ <value>30</value>
+ <description>The number of backup files</description>
+ <display-name>Hive Log2: # of backup files</display-name>
+ <value-attributes>
+ <type>int</type>
+ <minimum>0</minimum>
+ </value-attributes>
+ <on-ambari-upgrade add="false"/>
+ </property>
+ <property>
<name>content</name>
<display-name>hive-log4j2 template</display-name>
<description>Custom hive-log4j2.properties</description>
@@ -74,7 +95,9 @@ appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
appender.DRFA.policies.time.interval = 1
appender.DRFA.policies.time.modulate = true
appender.DRFA.strategy.type = DefaultRolloverStrategy
-appender.DRFA.strategy.max = 30
+appender.DRFA.strategy.max = {{hive2_log_maxbackupindex}}
+appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
+appender.DRFA.policies.fsize.size = {{hive2_log_maxfilesize}}MB
# list of all loggers
loggers = NIOServerCnxn, ClientCnxnSocketNIO, DataNucleus, Datastore, JPOX
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
index 67f5830..ff93265 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/configuration/llap-cli-log4j2.xml
@@ -21,6 +21,27 @@
-->
<configuration supports_final="false" supports_adding_forbidden="false">
<property>
+ <name>llap_cli_log_maxfilesize</name>
+ <value>256</value>
+ <description>The maximum size of backup file before the log is rotated</description>
+ <display-name>LLAP Client Log2: backup file size</display-name>
+ <value-attributes>
+ <unit>MB</unit>
+ </value-attributes>
+ <on-ambari-upgrade add="false"/>
+ </property>
+ <property>
+ <name>llap_cli_log_maxbackupindex</name>
+ <value>30</value>
+ <description>The number of backup files</description>
+ <display-name>LLAP Client Log2: # of backup files</display-name>
+ <value-attributes>
+ <type>int</type>
+ <minimum>0</minimum>
+ </value-attributes>
+ <on-ambari-upgrade add="false"/>
+ </property>
+ <property>
<name>content</name>
<display-name>llap-cli-log4j2 template</display-name>
<description>Custom llap-cli-log4j2.properties</description>
@@ -74,7 +95,9 @@ appender.DRFA.policies.time.type = TimeBasedTriggeringPolicy
appender.DRFA.policies.time.interval = 1
appender.DRFA.policies.time.modulate = true
appender.DRFA.strategy.type = DefaultRolloverStrategy
-appender.DRFA.strategy.max = 30
+appender.DRFA.strategy.max = {{llap_cli_log_maxbackupindex}}
+appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
+appender.DRFA.policies.fsize.size = {{llap_cli_log_maxfilesize}}MB
# list of all loggers
loggers = ZooKeeper, DataNucleus, Datastore, JPOX, HadoopConf
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
index ddffc79..4bda941 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/config-upgrade.xml
@@ -315,6 +315,34 @@
<type>hive-interactive-env</type>
<insert key="llap_java_opts" value="{{heap_dump_opts}}" insert-type="append" newline-before="false" newline-after="false" />
</definition>
+
+ <definition xsi:type="configure" id="hive_log4j2_parameterize" summary="Parameterizing Hive Log4J2 Properties">
+ <type>hive-log4j2</type>
+ <set key="hive2_log_maxfilesize" value="256"/>
+ <set key = "hive2_log_maxbackupindex" value="30"/>
+ <regex-replace key="content" find="appender.DRFA.strategy.max = ([0-9]+)" replace-with="appender.DRFA.strategy.max = {{hive2_log_maxbackupindex}}"/>
+ <replace key="content" find="appender.DRFA.strategy.type = DefaultRolloverStrategy" replace-with="appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.fsize.size = {{hive2_log_maxfilesize}}MB"/>
+ </definition>
+
+ <definition xsi:type="configure" id="llap_cli_log4j2_parameterize" summary="Parameterizing LLAP Cli Log4J2 Properties">
+ <type>llap-cli-log4j2</type>
+ <set key="llap_cli_log_maxfilesize" value="256"/>
+ <set key = "llap_cli_log_maxbackupindex" value="30"/>
+ <regex-replace key="content" find="appender.DRFA.strategy.max = ([0-9]+)" replace-with="appender.DRFA.strategy.max = {{llap_cli_log_maxbackupindex}}"/>
+ <replace key="content" find="appender.DRFA.strategy.type = DefaultRolloverStrategy" replace-with="appender.DRFA.strategy.type = DefaultRolloverStrategy
appender.DRFA.policies.fsize.type = SizeBasedTriggeringPolicy
appender.DRFA.policies.fsize.size = {{llap_cli_log_maxfilesize}}MB"/>
+ </definition>
+ </changes>
+
+ </component>
+ <component name = "WEBHCAT_SERVER">
+ <changes>
+ <definition xsi:type="configure" id="webhcat_log4j_parameterize" summary="Parameterizing Webhcat Log4J Properties">
+ <type>webhcat-log4j</type>
+ <set key="webhcat_log_maxfilesize" value="256"/>
+ <set key = "webhcat_log_maxbackupindex" value="20"/>
+ <replace key="content" find="log4j.appender.standard = org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard = org.apache.log4j.DailyRollingFileAppender
log4j.appender.standard.MaxFileSize = {{webhcat_log_maxfilesize}}MB"/>
+ <replace key="content" find="log4j.appender.standard = org.apache.log4j.DailyRollingFileAppender" replace-with="log4j.appender.standard = org.apache.log4j.DailyRollingFileAppender
log4j.appender.standard.MaxBackupIndex = {{webhcat_log_maxbackupindex}}"/>
+ </definition>
</changes>
</component>
<component name = "WEBHCAT_SERVER">
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
index ba33a7d..8092506 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml
@@ -444,7 +444,19 @@
<execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Appending Java heap dump options for HiveSever2 Interactive">
<task xsi:type="configure" id="hdp_2_6_0_0_hive_llap_append_java_heap_dump_options"/>
- </execute-stage>
+ </execute-stage>
+
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing LLAP Cli Log4J2 Properties">
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize">
+ <summary>Updating the LLAP Cli Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
+
+ <execute-stage service="HIVE" component="HIVE_SERVER_INTERACTIVE" title="Parameterizing Hive Log4J2 Properties">
+ <task xsi:type="configure" id="hive_log4j2_parameterize">
+ <summary>Updating the Hive Log4J2 properties to include parameterizations</summary>
+ </task>
+ </execute-stage>
</group>
<!--
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
index 790e50c..a739551 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.6.xml
@@ -783,6 +783,8 @@
<task xsi:type="configure" id="hdp_2_6_0_0_hive_llap_append_java_heap_dump_options"/>
<task xsi:type="configure" id="hdp_2_6_0_0_hive_llap_append_heap_dump_options"/>
<task xsi:type="configure" id="webhcat_log4j_parameterize" />
+ <task xsi:type="configure" id="hive_log4j2_parameterize"/>
+ <task xsi:type="configure" id="llap_cli_log4j2_parameterize"/>
</pre-upgrade>
<pre-downgrade />
@@ -793,6 +795,10 @@
</component>
<component name="WEBHCAT_SERVER">
+ <pre-upgrade>
+ <task xsi:type="configure" id="webhcat_log4j_parameterize" />
+ </pre-upgrade>
+ <pre-downgrade/>
<upgrade>
<task xsi:type="restart-task" />
</upgrade>
http://git-wip-us.apache.org/repos/asf/ambari/blob/c977113c/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
index 2bc28e5..fb97612 100644
--- a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
@@ -566,13 +566,13 @@ class TestHiveServerInteractive(RMFTestCase):
mode=0600,
)
self.assertResourceCalled('File', os.path.join(conf_dir, 'llap-cli-log4j2.properties'),
- content='con\ntent',
+ content=InlineTemplate('con\ntent'),
owner='hive',
group='hadoop',
mode=0600,
)
self.assertResourceCalled('File', os.path.join(conf_dir, 'hive-log4j2.properties'),
- content='con\ntent', # Test new line
+ content=InlineTemplate('con\ntent'), # Test new line
owner='hive',
group='hadoop',
mode=0600,
@@ -631,13 +631,13 @@ class TestHiveServerInteractive(RMFTestCase):
mode=0644,
)
self.assertResourceCalled('File', os.path.join(conf_dir, 'llap-cli-log4j2.properties'),
- content='con\ntent',
+ content=InlineTemplate('con\ntent'),
owner='hive',
group='hadoop',
mode=0644,
)
self.assertResourceCalled('File', os.path.join(conf_dir, 'hive-log4j2.properties'),
- content='con\ntent', # Test new line
+ content=InlineTemplate('con\ntent'), # Test new line
owner='hive',
group='hadoop',
mode=0644,
[24/30] ambari git commit: AMBARI-19753. Hive2 View - Switching tabs
after executing query loses state (pallavkul)
Posted by nc...@apache.org.
AMBARI-19753. Hive2 View - Switching tabs after executing query loses state (pallavkul)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/589f567e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/589f567e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/589f567e
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 589f567e37800ff606f5c40eca99508ce8910899
Parents: 75c126e
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jan 30 15:24:03 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jan 30 15:24:03 2017 +0530
----------------------------------------------------------------------
.../src/main/resources/ui/app/models/worksheet.js | 3 ++-
.../src/main/resources/ui/app/routes/queries/query.js | 12 ++++++------
.../main/resources/ui/app/templates/queries/query.hbs | 2 +-
3 files changed, 9 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/589f567e/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js b/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
index 9021e90..6a77633 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
@@ -35,6 +35,7 @@ export default DS.Model.extend({
selectedMultiDb: DS.attr(),
queryFile: DS.attr('string', {defaultValue: ""}),
logFile: DS.attr('string', {defaultValue: ""}),
- logResults: DS.attr('string', {defaultValue: ""})
+ logResults: DS.attr('string', {defaultValue: ""}),
+ isQueryRunning: DS.attr('boolean', {defaultValue: false})
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/589f567e/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 7d387f0..753b7f7 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -75,10 +75,10 @@ export default Ember.Route.extend({
selectedMultiDb.pushObject(selecteDBName);
controller.set('worksheet', model);
- controller.set('selectedTablesModels',this.get('controller.model').get('selectedTablesModels') || selectedTablesModels );
+ controller.set('selectedTablesModels',model.get('selectedTablesModels') || selectedTablesModels );
- controller.set('selectedMultiDb', this.get('controller.model').get('selectedMultiDb') || selectedMultiDb);
- controller.set('isQueryRunning', false);
+ controller.set('selectedMultiDb', model.get('selectedMultiDb') || selectedMultiDb);
+ controller.set('isQueryRunning', model.get('isQueryRunning'));
controller.set('currentQuery', model.get('query'));
controller.set('queryResult', model.get('queryResult'));
controller.set('currentJobId', null);
@@ -143,7 +143,7 @@ export default Ember.Route.extend({
let worksheetTitle = this.get('controller.model').get('title');
self.get('controller.model').set('jobData', []);
- self.get('controller').set('isQueryRunning', true);
+ self.get('controller.model').set('isQueryRunning', true);
//Making the result set empty every time the query runs.
self.get('controller').set('queryResult', self.get('controller').get('queryResult'));
@@ -247,7 +247,7 @@ export default Ember.Route.extend({
self.get('controller').set('queryResult', data);
self.get('controller.model').set('queryResult', data);
- self.get('controller').set('isQueryRunning', false);
+ self.get('controller.model').set('isQueryRunning', false);
let localArr = self.get('controller.model').get("jobData");
localArr.push(data);
@@ -299,7 +299,7 @@ export default Ember.Route.extend({
console.log('getJob route', data );
self.get('controller').set('queryResult', data);
self.get('controller.model').set('queryResult', data);
- self.get('controller').set('isQueryRunning', false);
+ self.get('controller.model').set('isQueryRunning', false);
self.get('controller.model').set('hidePreviousButton', false);
let localArr = self.get('controller.model').get("jobData");
http://git-wip-us.apache.org/repos/asf/ambari/blob/589f567e/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 7f91d6c..5cdc9b3 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -30,7 +30,7 @@
<div class="row query-editor-controls">
<button class="btn btn-success" {{action "executeQuery" }}>{{fa-icon "check"}} Execute</button>
<button class="btn btn-default" {{action "openWorksheetModal" }}>{{fa-icon "save"}} Save As</button>
- {{#if isQueryRunning}}
+ {{#if worksheet.isQueryRunning}}
{{fa-icon "spinner fa-1-5" spin=true}}
{{/if}}
</div>
[05/30] ambari git commit: AMBARI-19751: Credential Store params
should be in the command json for RESTART
Posted by nc...@apache.org.
AMBARI-19751: Credential Store params should be in the command json for RESTART
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9d8a54c2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9d8a54c2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9d8a54c2
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9d8a54c23146db595e81d72785c4f9779f3bee00
Parents: 65eb886
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Fri Jan 27 09:56:03 2017 -0800
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Fri Jan 27 12:32:59 2017 -0800
----------------------------------------------------------------------
.../AmbariCustomCommandExecutionHelper.java | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/9d8a54c2/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 8b851ac..93f4a8f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -175,6 +175,8 @@ public class AmbariCustomCommandExecutionHelper {
@Inject
private HostRoleCommandDAO hostRoleCommandDAO;
+ private Map<String, Map<String, Map<String, String>>> configCredentialsForService = new HashMap<>();
+
protected static final String SERVICE_CHECK_COMMAND_NAME = "SERVICE_CHECK";
protected static final String START_COMMAND_NAME = "START";
protected static final String RESTART_COMMAND_NAME = "RESTART";
@@ -382,6 +384,20 @@ public class AmbariCustomCommandExecutionHelper {
execCmd.setAvailableServicesFromServiceInfoMap(ambariMetaInfo.getServices(stackId.getStackName(), stackId.getStackVersion()));
+ // Get the value of credential store enabled from the DB
+ Service clusterService = cluster.getService(serviceName);
+ execCmd.setCredentialStoreEnabled(String.valueOf(clusterService.isCredentialStoreEnabled()));
+
+ // Get the map of service config type to password properties for the service
+ Map<String, Map<String, String>> configCredentials;
+ configCredentials = configCredentialsForService.get(clusterService.getName());
+ if (configCredentials == null) {
+ configCredentials = configHelper.getCredentialStoreEnabledProperties(stackId, clusterService);
+ configCredentialsForService.put(clusterService.getName(), configCredentials);
+ }
+
+ execCmd.setConfigurationCredentials(configCredentials);
+
Map<String, String> hostLevelParams = new TreeMap<>();
hostLevelParams.put(CUSTOM_COMMAND, commandName);
[02/30] ambari git commit: AMBARI-19641: Deleted configuration groups
are displayed along with active group (Amruta Borkar via dili)
Posted by nc...@apache.org.
AMBARI-19641: Deleted configuration groups are displayed along with active group (Amruta Borkar via dili)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6d02a7a5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6d02a7a5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6d02a7a5
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6d02a7a5b3415a87b0cbcf21d360942719efcb9d
Parents: dcdf95b
Author: Di Li <di...@apache.org>
Authored: Fri Jan 27 14:13:55 2017 -0500
Committer: Di Li <di...@apache.org>
Committed: Fri Jan 27 14:13:55 2017 -0500
----------------------------------------------------------------------
.../orm/entities/ServiceConfigEntity.java | 2 +-
.../server/orm/dao/ServiceConfigDAOTest.java | 66 ++++++++++++++++----
2 files changed, 56 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/6d02a7a5/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
index 2d6ca20..8a1b316 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
@@ -50,7 +50,7 @@ import javax.persistence.TableGenerator;
@NamedQuery(name = "ServiceConfigEntity.findNextServiceConfigVersion", query = "SELECT COALESCE(MAX(serviceConfig.version), 0) + 1 AS nextVersion FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.serviceName=:serviceName AND serviceConfig.clusterId=:clusterId"),
@NamedQuery(name = "ServiceConfigEntity.findAllServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId=:clusterId AND serviceConfig.stack=:stack"),
@NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId = :clusterId AND serviceConfig.version = (SELECT MAX(serviceConfig2.version) FROM ServiceConfigEntity serviceConfig2 WHERE serviceConfig2.clusterId=:clusterId AND serviceConfig2.stack=:stack AND serviceConfig2.serviceName = serviceConfig.serviceName)"),
- @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))"),
+ @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND (scv.groupId = null OR scv.groupId IN (SELECT cg.groupId from ConfigGroupEntity cg)) AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))"),
@NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByCluster", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceConfigId IN (SELECT MAX(scv1.serviceConfigId) FROM ServiceConfigEntity scv1 WHERE (scv1.clusterId = :clusterId) AND (scv1.groupId IS NULL) GROUP BY scv1.serviceName)")})
public class ServiceConfigEntity {
@Id
http://git-wip-us.apache.org/repos/asf/ambari/blob/6d02a7a5/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ServiceConfigDAOTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ServiceConfigDAOTest.java b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ServiceConfigDAOTest.java
index aafe557..5890c35 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ServiceConfigDAOTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/orm/dao/ServiceConfigDAOTest.java
@@ -262,23 +262,43 @@ public class ServiceConfigDAOTest {
@Test
public void testGetLastServiceConfigsForService() throws Exception {
String serviceName = "HDFS";
+ Clusters clusters = injector.getInstance(Clusters.class);
+ clusters.addCluster("c1", HDP_01);
+ ConfigGroupEntity configGroupEntity1 = new ConfigGroupEntity();
+ ClusterEntity clusterEntity = clusterDAO.findById(1L);
+ configGroupEntity1.setClusterEntity(clusterEntity);
+ configGroupEntity1.setClusterId(clusterEntity.getClusterId());
+ configGroupEntity1.setGroupName("group1");
+ configGroupEntity1.setDescription("group1_desc");
+ configGroupEntity1.setTag("HDFS");
+ configGroupEntity1.setServiceName("HDFS");
+ configGroupDAO.create(configGroupEntity1);
+ ConfigGroupEntity group1 = configGroupDAO.findByName("group1");
+ ConfigGroupEntity configGroupEntity2 = new ConfigGroupEntity();
+ configGroupEntity2.setClusterEntity(clusterEntity);
+ configGroupEntity2.setClusterId(clusterEntity.getClusterId());
+ configGroupEntity2.setGroupName("group2");
+ configGroupEntity2.setDescription("group2_desc");
+ configGroupEntity2.setTag("HDFS");
+ configGroupEntity2.setServiceName("HDFS");
+ configGroupDAO.create(configGroupEntity2);
+ ConfigGroupEntity group2 = configGroupDAO.findByName("group2");
createServiceConfig(serviceName, "admin", 1L, 1L, 1111L, null);
createServiceConfig(serviceName, "admin", 2L, 2L, 1010L, null);
- createServiceConfigWithGroup(serviceName, "admin", 3L, 3L, 2222L, null, 1L);
- createServiceConfigWithGroup(serviceName, "admin", 5L, 5L, 3333L, null, 2L);
- createServiceConfigWithGroup(serviceName, "admin", 4L, 4L, 3330L, null, 2L);
-
- List<ServiceConfigEntity> serviceConfigEntities =
- serviceConfigDAO.getLastServiceConfigsForService(clusterDAO.findByName("c1").getClusterId(), serviceName);
+ createServiceConfigWithGroup(serviceName, "admin", 3L, 3L, 2222L, null, group1.getGroupId());
+ createServiceConfigWithGroup(serviceName, "admin", 5L, 5L, 3333L, null, group2.getGroupId());
+ createServiceConfigWithGroup(serviceName, "admin", 4L, 4L, 3330L, null, group2.getGroupId());
+ List<ServiceConfigEntity> serviceConfigEntities = serviceConfigDAO
+ .getLastServiceConfigsForService(clusterDAO.findByName("c1").getClusterId(), serviceName);
Assert.assertNotNull(serviceConfigEntities);
Assert.assertEquals(3, serviceConfigEntities.size());
- for (ServiceConfigEntity sce: serviceConfigEntities) {
- if (sce.getGroupId() != null && sce.getGroupId().equals(2L)) {
- // Group ID with the highest version should be selected
- Assert.assertEquals(sce.getVersion(), Long.valueOf(5L));
- }
+ for (ServiceConfigEntity sce : serviceConfigEntities) {
+ if (sce.getGroupId() != null && sce.getGroupId().equals(group2.getGroupId())) {
+ // Group ID with the highest version should be selected
+ Assert.assertEquals(sce.getVersion(), Long.valueOf(5L));
+ }
}
}
@@ -525,6 +545,30 @@ public class ServiceConfigDAOTest {
}
}
+ @Test
+ public void testGetLastServiceConfigsForServiceWhenAConfigGroupIsDeleted() throws Exception {
+ Clusters clusters = injector.getInstance(Clusters.class);
+ clusters.addCluster("c1", HDP_01);
+ initClusterEntitiesWithConfigGroups();
+ ConfigGroupEntity configGroupEntity1 = new ConfigGroupEntity();
+ ClusterEntity clusterEntity = clusterDAO.findById(1L);
+ configGroupEntity1.setClusterEntity(clusterEntity);
+ configGroupEntity1.setClusterId(clusterEntity.getClusterId());
+ configGroupEntity1.setGroupName("toTestDeleteGroup_OOZIE");
+ configGroupEntity1.setDescription("toTestDeleteGroup_OOZIE_DESC");
+ configGroupEntity1.setTag("OOZIE");
+ configGroupEntity1.setServiceName("OOZIE");
+ configGroupDAO.create(configGroupEntity1);
+ ConfigGroupEntity testDeleteGroup_OOZIE = configGroupDAO.findByName("toTestDeleteGroup_OOZIE");
+ createServiceConfigWithGroup("OOZIE", "", 2L, 2L, System.currentTimeMillis(), null,
+ testDeleteGroup_OOZIE.getGroupId());
+ Collection<ServiceConfigEntity> serviceConfigEntityList = serviceConfigDAO.getLastServiceConfigsForService(1L,
+ "OOZIE");
+ Assert.assertEquals(2, serviceConfigEntityList.size());
+ configGroupDAO.remove(configGroupEntity1);
+ serviceConfigEntityList = serviceConfigDAO.getLastServiceConfigsForService(1L, "OOZIE");
+ Assert.assertEquals(1, serviceConfigEntityList.size());
+ }
private void initClusterEntities() throws Exception{
String userName = "admin";
[19/30] ambari git commit: AMBARI-19770. Hive View 2.0: New Job and
New Table button does nothing. (dipayanb)
Posted by nc...@apache.org.
AMBARI-19770. Hive View 2.0: New Job and New Table button does nothing. (dipayanb)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/985dafff
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/985dafff
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/985dafff
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 985dafff5330b97e4b92f1c4cf67ee201a8e6edd
Parents: ea1fbaa
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Mon Jan 30 14:26:07 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Mon Jan 30 14:28:04 2017 +0530
----------------------------------------------------------------------
.../hive20/src/main/resources/ui/app/app.js | 9 ++++
.../resources/ui/app/controllers/queries.js | 22 +++++++++
.../main/resources/ui/app/models/worksheet.js | 2 +-
.../hive20/src/main/resources/ui/app/router.js | 2 +
.../main/resources/ui/app/routes/application.js | 4 +-
.../ui/app/routes/databases/newtable.js | 30 ++++++++++++
.../src/main/resources/ui/app/routes/queries.js | 49 +-------------------
.../resources/ui/app/routes/queries/index.js | 9 ++--
.../main/resources/ui/app/routes/queries/new.js | 38 +++++++++++++++
.../resources/ui/app/routes/queries/query.js | 17 ++++---
.../src/main/resources/ui/app/services/jobs.js | 6 ++-
.../components/top-application-bar.hbs | 4 +-
.../src/main/resources/ui/config/environment.js | 2 +
13 files changed, 130 insertions(+), 64 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/app.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/app.js b/contrib/views/hive20/src/main/resources/ui/app/app.js
index af4fdc4..0f83ee5 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/app.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/app.js
@@ -26,6 +26,15 @@ let App;
Ember.MODEL_FACTORY_INJECTIONS = true;
App = Ember.Application.extend({
+ // Basic logging, e.g. "Transitioned into 'post'"
+ LOG_TRANSITIONS: false,
+
+ // Extremely detailed logging, highlighting every internal
+ // step made while transitioning into a route, including
+ // `beforeModel`, `model`, and `afterModel` hooks, and
+ // information about redirects and aborted transitions
+ LOG_TRANSITIONS_INTERNAL: false,
+
modulePrefix: config.modulePrefix,
podModulePrefix: config.podModulePrefix,
Resolver
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/controllers/queries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/controllers/queries.js b/contrib/views/hive20/src/main/resources/ui/app/controllers/queries.js
new file mode 100644
index 0000000..dc99fd1
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/controllers/queries.js
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Controller.extend({
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js b/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
index 23f16b0..9021e90 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/models/worksheet.js
@@ -20,7 +20,7 @@ import DS from 'ember-data';
export default DS.Model.extend({
title: DS.attr('string'),
- query: DS.attr('string'),
+ query: DS.attr('string', {defaultValue: ''}),
selectedDb: DS.attr('string'),
owner: DS.attr('string'),
queryResult: DS.attr({defaultValue: {'schema' :[], 'rows' :[]}}),
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/router.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/router.js b/contrib/views/hive20/src/main/resources/ui/app/router.js
index e32dfe8..b9db38d 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/router.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/router.js
@@ -31,6 +31,7 @@ Router.map(function() {
this.route('savedqueries');
this.route('databases', function() {
+ this.route('newtable');
this.route('database', {path: '/:databaseId'}, function() {
this.route('tables', {path: '/tables'}, function() {
this.route('new');
@@ -52,6 +53,7 @@ Router.map(function() {
});
this.route('queries', function() {
+ this.route('new');
this.route('query', {path: '/:worksheetId'}, function() {
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/application.js b/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
index 93f5e61..aa77897 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/application.js
@@ -21,11 +21,11 @@ import tabs from '../configs/top-level-tabs';
export default Ember.Route.extend({
keepAlive: Ember.inject.service('keep-alive'),
- init: function() {
+ init: function () {
this._super(...arguments);
this.get('keepAlive').initialize();
},
- setupController: function(controller, model) {
+ setupController: function (controller, model) {
this._super(controller, model);
controller.set('tabs', tabs);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/routes/databases/newtable.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/databases/newtable.js b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/newtable.js
new file mode 100644
index 0000000..7502183
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/databases/newtable.js
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+
+ setupController(controller, model) {
+ this._super(controller, model);
+ let selectedDatabase = this.modelFor('databases').filterBy('selected', true).get('firstObject');
+ Ember.run.later(() => {
+ this.transitionTo('databases.database.tables.new', selectedDatabase.get('name'));
+ }, 100);
+ }
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/routes/queries.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries.js
index d9f0360..4ef3834 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries.js
@@ -20,56 +20,9 @@ import Ember from 'ember';
export default Ember.Route.extend({
- model() {
-
- let existingWorksheets = this.store.peekAll('worksheet');
-
- if(existingWorksheets.get('length') === 0) {
- this.store.createRecord('worksheet', {
- id: 'worksheet1',
- title: 'Worksheet1',
- query: 'select 1;',
- selectedDb : 'default',
- owner: 'admin',
- selected: true
- });
- }
-
- return this.store.peekAll('worksheet');
-
- },
- setupController(controller, model) {
- this._super(...arguments);
- controller.set('worksheets', model);
-
- // This is just the initial currentWorksheet, It will be set on correctly on click of worksheet.
- controller.set('currentWorksheet', controller.get('worksheets').get('firstObject'));
-
- },
-
actions: {
-
createNewWorksheet(){
-
- let worksheets = this.controllerFor('queries').get('model');
- worksheets.forEach((worksheet) => {
- worksheet.set('selected', false);
- });
-
- let localWs = {
- id: `worksheet${worksheets.get('length') + 1}`,
- title:`Worksheet${worksheets.get('length') + 1}`,
- query: 'select '+ parseInt(worksheets.get('length') + 1) + ';',
- selectedDb : 'default',
- owner: 'admin',
- selected: true
- };
-
- let newWorksheet = this.store.createRecord('worksheet', localWs );
- this.set('controller.worksheets', this.store.peekAll('worksheet'));
-
- this.transitionTo('queries.query', localWs.title);
+ this.transitionTo('queries.new');
}
-
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/routes/queries/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/index.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/index.js
index 9872583..f2af6bc 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/index.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/index.js
@@ -20,10 +20,13 @@ import Ember from 'ember';
export default Ember.Route.extend({
beforeModel() {
- if(this.modelFor('queries').filterBy('selected', true).length > 0){
- let selectedWorksheet = this.modelFor('queries').filterBy('selected', true).get('firstObject');
- console.log('worksheet-title', selectedWorksheet.get('title'));
+ let existingWorksheets = this.store.peekAll('worksheet');
+ if(existingWorksheets.get('length') > 0) {
+ let selectedWorksheet = existingWorksheets.filterBy('selected', true).get('firstObject');
+ this.controllerFor('queries').set('worksheets', existingWorksheets);
this.transitionTo('queries.query', selectedWorksheet.get('title'));
+ } else {
+ this.transitionTo('queries.new');
}
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
new file mode 100644
index 0000000..5a869c2
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+ beforeModel() {
+ let existingWorksheets = this.store.peekAll('worksheet');
+ let newWorksheetName = `worksheet${existingWorksheets.get('length') + 1}`;
+ let newWorksheetTitle = newWorksheetName.capitalize();
+ this.store.createRecord('worksheet', {
+ id: newWorksheetName,
+ title: newWorksheetTitle,
+ //query: 'select 1;',
+ selectedDb : 'default',
+ //owner: 'admin',
+ selected: true
+ });
+ existingWorksheets.setEach('selected', false);
+ this.controllerFor('queries').set('worksheets', this.store.peekAll('worksheet'));
+ this.transitionTo('queries.query', newWorksheetTitle);
+ }
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 2ecf967..7d387f0 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -26,9 +26,7 @@ export default Ember.Route.extend({
beforeModel(){
let existingWorksheets = this.store.peekAll('worksheet');
- existingWorksheets.forEach((worksheet) => {
- worksheet.set('selected', false);
- });
+ existingWorksheets.setEach('selected', false);
},
afterModel(model) {
@@ -39,9 +37,13 @@ export default Ember.Route.extend({
},
model(params) {
- let selectedWs = this.modelFor('queries').filterBy('title', params.worksheetId).get('firstObject');
- selectedWs.set('selected', true);
- return selectedWs;
+ let selectedWs = this.store.peekAll('worksheet').filterBy('title', params.worksheetId).get('firstObject');
+ if(selectedWs) {
+ selectedWs.set('selected', true);
+ return selectedWs;
+ } else {
+ this.transitionTo('queries');
+ }
},
setupController(controller, model) {
@@ -173,7 +175,7 @@ export default Ember.Route.extend({
self.get('controller.model').set('logFile', data.job.logFile);
self.get('controller').set('currentJobId', data.job.id);
- self.get('jobs').waitForJobToComplete(data.job.id, 5 * 1000)
+ self.get('jobs').waitForJobToComplete(data.job.id, 5 * 1000, false)
.then((status) => {
Ember.run.later(() => {
self.get('controller').set('isJobSuccess', true);
@@ -468,4 +470,5 @@ export default Ember.Route.extend({
$('.editor-result-list').addClass('active');
}
}
+
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
index 5d7ce77..ff54152 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
@@ -60,6 +60,10 @@ export default Ember.Service.extend({
reject(error);
});
});
- }
+ },
+
+ _fetchDummyResult(jobId) {
+ this.get('store').adapterFor('job').fetchResult(jobId);
+ },
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
index f930be9..6f451e9 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/top-application-bar.hbs
@@ -19,7 +19,7 @@
<h3 class="clearfix">
<strong>HIVE</strong>
<span class="pull-right">
- <button class="btn btn-sm btn-success">{{fa-icon "plus"}} NEW JOB</button>
- <button class="btn btn-sm btn-success">{{fa-icon "plus"}} NEW TABLE</button>
+ {{#link-to 'queries.new' class="btn btn-sm btn-success"}}{{fa-icon "plus"}} NEW JOB{{/link-to}}
+ {{#link-to 'databases.newtable' class="btn btn-sm btn-success"}}{{fa-icon "plus"}} NEW TABLE{{/link-to}}
</span>
</h3>
http://git-wip-us.apache.org/repos/asf/ambari/blob/985dafff/contrib/views/hive20/src/main/resources/ui/config/environment.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/config/environment.js b/contrib/views/hive20/src/main/resources/ui/config/environment.js
index 9efd491..42dde20 100644
--- a/contrib/views/hive20/src/main/resources/ui/config/environment.js
+++ b/contrib/views/hive20/src/main/resources/ui/config/environment.js
@@ -34,6 +34,8 @@ module.exports = function(environment) {
APP: {
// Here you can pass flags/options to your application instance
// when it is created
+ LOG_VIEW_LOOKUPS: false,
+ LOG_ACTIVE_GENERATION: false
}
};
[21/30] ambari git commit: AMBARI-19744. Hive View 2.0: Hive view
starts with an empty page. (dipayanb)
Posted by nc...@apache.org.
AMBARI-19744. Hive View 2.0: Hive view starts with an empty page. (dipayanb)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/37b41424
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/37b41424
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/37b41424
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 37b414241b9b8a7b909278335037b7c0fe6fa441
Parents: abcd063
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Mon Jan 30 14:32:50 2017 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Mon Jan 30 14:33:41 2017 +0530
----------------------------------------------------------------------
.../src/main/resources/ui/app/routes/index.js | 26 ++++++++++++++++++++
1 file changed, 26 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/37b41424/contrib/views/hive20/src/main/resources/ui/app/routes/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/index.js b/contrib/views/hive20/src/main/resources/ui/app/routes/index.js
new file mode 100644
index 0000000..ddaff8a
--- /dev/null
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/index.js
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import Ember from 'ember';
+
+export default Ember.Route.extend({
+ beforeModel() {
+ this.transitionTo('queries');
+ }
+
+});
[13/30] ambari git commit: AMBARI-19723 Log Search portal not working
if only solr needs SSL connection (mgergely)
Posted by nc...@apache.org.
AMBARI-19723 Log Search portal not working if only solr needs SSL connection (mgergely)
Change-Id: Iebb2ef076dd4f75af73757ab3f08f250c654df69
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/015d4046
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/015d4046
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/015d4046
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 015d404604da164aa939635b7835a9551321cc5e
Parents: 86fbb38
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Sun Jan 29 13:24:00 2017 +0100
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Sun Jan 29 13:24:00 2017 +0100
----------------------------------------------------------------------
.../ambari-logsearch-portal/pom.xml | 5 +
.../org/apache/ambari/logsearch/LogSearch.java | 98 +------------
.../apache/ambari/logsearch/util/FileUtil.java | 26 ++++
.../apache/ambari/logsearch/util/SSLUtil.java | 141 +++++++++++++++----
4 files changed, 148 insertions(+), 122 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/015d4046/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index 61dcb37..32d4e2c 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -786,5 +786,10 @@
<artifactId>bcprov-jdk15on</artifactId>
<version>1.55</version>
</dependency>
+ <dependency>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcpkix-jdk15on</artifactId>
+ <version>1.55</version>
+ </dependency>
</dependencies>
</project>
http://git-wip-us.apache.org/repos/asf/ambari/blob/015d4046/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index 88cc8bb..70053d2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -18,17 +18,12 @@
*/
package org.apache.ambari.logsearch;
-import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.ServerSocket;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
-import java.security.KeyPair;
-import java.security.KeyStore;
-import java.security.Security;
-import java.security.cert.X509Certificate;
import java.util.EnumSet;
import org.apache.ambari.logsearch.common.ManageStartEndTime;
@@ -36,12 +31,7 @@ import org.apache.ambari.logsearch.common.PropertiesHelper;
import org.apache.ambari.logsearch.conf.ApplicationConfig;
import org.apache.ambari.logsearch.util.SSLUtil;
import org.apache.ambari.logsearch.web.listener.LogSearchSessionListener;
-import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
-import org.apache.tools.ant.Project;
-import org.apache.tools.ant.taskdefs.Chmod;
-import org.apache.tools.ant.types.FileSet;
-import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
@@ -73,8 +63,6 @@ public class LogSearch {
private static final Logger LOG = LoggerFactory.getLogger(LogSearch.class);
private static final String LOGSEARCH_PROTOCOL_PROP = "logsearch.protocol";
- private static final String LOGSEARCH_CERT_FOLDER_LOCATION = "logsearch.cert.folder.location";
- private static final String LOGSEARCH_CERT_ALGORITHM = "logsearch.cert.algorithm";
private static final String HTTPS_PROTOCOL = "https";
private static final String HTTP_PROTOCOL = "http";
private static final String HTTPS_PORT = "61889";
@@ -84,15 +72,6 @@ public class LogSearch {
private static final String ROOT_CONTEXT = "/";
private static final Integer SESSION_TIMEOUT = 60 * 30;
- private static final String LOGSEARCH_CERT_FILENAME = "logsearch.crt";
- private static final String LOGSEARCH_KEYSTORE_FILENAME = "logsearch.jks";
- private static final String LOGSEARCH_KEYSTORE_PRIVATE_KEY = "logsearch.private.key";
- private static final String LOGSEARCH_KEYSTORE_PUBLIC_KEY = "logsearch.public.key";
- private static final String LOGSEARCH_CERT_DEFAULT_ALGORITHM = "sha256WithRSAEncryption";
-
- public static final String LOGSEARCH_CERT_DEFAULT_FOLDER = "/etc/ambari-logsearch-portal/conf/keys";
- public static final String LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD = "bigdata";
-
public static void main(String[] argv) {
LogSearch logSearch = new LogSearch();
ManageStartEndTime.manage();
@@ -104,7 +83,8 @@ public class LogSearch {
}
public void run(String[] argv) throws Exception {
- loadKeystore();
+ SSLUtil.ensureStorePasswords();
+ SSLUtil.loadKeystore();
Server server = buildSever(argv);
HandlerList handlers = new HandlerList();
handlers.addHandler(createSwaggerContext());
@@ -113,11 +93,9 @@ public class LogSearch {
server.setHandler(handlers);
server.start();
- LOG
- .debug("============================Server Dump=======================================");
+ LOG.debug("============================Server Dump=======================================");
LOG.debug(server.dump());
- LOG
- .debug("==============================================================================");
+ LOG.debug("==============================================================================");
server.join();
}
@@ -202,7 +180,7 @@ public class LogSearch {
private URI findWebResourceBase() {
URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
.getResource(WEB_RESOURCE_FOLDER);
- String errorMessage = "Web Resource Folder " + WEB_RESOURCE_FOLDER+ " not found in classpath";
+ String errorMessage = "Web Resource Folder " + WEB_RESOURCE_FOLDER + " not found in classpath";
if (fileCompleteUrl != null) {
try {
return fileCompleteUrl.toURI().normalize();
@@ -238,70 +216,4 @@ public class LogSearch {
}
}
}
-
- /**
- * Create keystore with keys and certificate (only if the keystore does not exist or if you have no permissions on the keystore file)
- */
- void loadKeystore() {
- try {
- String certFolder = PropertiesHelper.getProperty(LOGSEARCH_CERT_FOLDER_LOCATION, LOGSEARCH_CERT_DEFAULT_FOLDER);
- String certAlgorithm = PropertiesHelper.getProperty(LOGSEARCH_CERT_ALGORITHM, LOGSEARCH_CERT_DEFAULT_ALGORITHM);
- String certLocation = String.format("%s/%s", LOGSEARCH_CERT_DEFAULT_FOLDER, LOGSEARCH_CERT_FILENAME);
- String keyStoreLocation = StringUtils.isNotEmpty(SSLUtil.getKeyStoreLocation()) ? SSLUtil.getKeyStoreLocation()
- : String.format("%s/%s", LOGSEARCH_CERT_DEFAULT_FOLDER, LOGSEARCH_KEYSTORE_FILENAME);
- char[] password = StringUtils.isNotEmpty(SSLUtil.getKeyStorePassword()) ?
- SSLUtil.getKeyStorePassword().toCharArray() : LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD.toCharArray();
- boolean keyStoreFileExists = new File(keyStoreLocation).exists();
- if (!keyStoreFileExists) {
- createDefaultKeyFolder(certFolder);
- LOG.warn("Keystore file ('{}') does not exist, creating new one. " +
- "If the file exists, make sure you have proper permissions on that.", keyStoreLocation);
- if (SSLUtil.isKeyStoreSpecified() && !"JKS".equalsIgnoreCase(SSLUtil.getKeyStoreType())) {
- throw new RuntimeException(String.format("Keystore does not exist. Only JKS keystore can be auto generated. (%s)", keyStoreLocation));
- }
- LOG.info("SSL keystore is not specified. Generating it with certificate ... (using default format: JKS)");
- Security.addProvider(new BouncyCastleProvider());
- KeyPair keyPair = SSLUtil.createKeyPair("RSA", 2048);
- File privateKeyFile = new File(String.format("%s/%s", certFolder, LOGSEARCH_KEYSTORE_PRIVATE_KEY));
- if (!privateKeyFile.exists()) {
- FileUtils.writeByteArrayToFile(privateKeyFile, keyPair.getPrivate().getEncoded());
- }
- File file = new File(String.format("%s/%s", certFolder, LOGSEARCH_KEYSTORE_PUBLIC_KEY));
- if (!file.exists()) {
- FileUtils.writeByteArrayToFile(file, keyPair.getPublic().getEncoded());
- }
- X509Certificate cert = SSLUtil.generateCertificate(certLocation, keyPair, certAlgorithm);
- KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
- keyStore.load(null, password);
- SSLUtil.setKeyAndCertInKeystore(cert, keyPair, keyStore, keyStoreLocation, password);
- setPermissionOnCertFolder(certFolder);
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- private void createDefaultKeyFolder(String certFolder) {
- File keyFolderDirectory = new File(certFolder);
- if (!keyFolderDirectory.exists()) {
- LOG.info("Default key dir does not exist ({}). Creating ...", certFolder);
- boolean mkDirSuccess = keyFolderDirectory.mkdirs();
- if (!mkDirSuccess) {
- String errorMessage = String.format("Could not create directory %s", certFolder);
- LOG.error(errorMessage);
- throw new RuntimeException(errorMessage);
- }
- }
- }
-
- private void setPermissionOnCertFolder(String certFolder) {
- Chmod chmod = new Chmod();
- chmod.setProject(new Project());
- FileSet fileSet = new FileSet();
- fileSet.setDir(new File(certFolder));
- fileSet.setIncludes("**");
- chmod.addFileset(fileSet);
- chmod.setPerm("600");
- chmod.execute();
- }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/015d4046/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
index f7330fa..5d4efbc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
@@ -23,6 +23,9 @@ import java.io.File;
import java.net.URL;
import org.apache.log4j.Logger;
+import org.apache.tools.ant.Project;
+import org.apache.tools.ant.taskdefs.Chmod;
+import org.apache.tools.ant.types.FileSet;
public class FileUtil {
private static final Logger logger = Logger.getLogger(FileUtil.class);
@@ -43,4 +46,27 @@ public class FileUtil {
return file;
}
+ public static void createDirectory(String dirPath) {
+ File dir = new File(dirPath);
+ if (!dir.exists()) {
+ logger.info("Directory " + dirPath + " does not exist. Creating ...");
+ boolean mkDirSuccess = dir.mkdirs();
+ if (!mkDirSuccess) {
+ String errorMessage = String.format("Could not create directory %s", dirPath);
+ logger.error(errorMessage);
+ throw new RuntimeException(errorMessage);
+ }
+ }
+ }
+
+ public static void setPermissionOnDirectory(String dirPath, String permission) {
+ Chmod chmod = new Chmod();
+ chmod.setProject(new Project());
+ FileSet fileSet = new FileSet();
+ fileSet.setDir(new File(dirPath));
+ fileSet.setIncludes("**");
+ chmod.addFileset(fileSet);
+ chmod.setPerm(permission);
+ chmod.execute();
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/015d4046/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java
index e0111e7..ea3474f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SSLUtil.java
@@ -26,10 +26,20 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.bouncycastle.jce.X509Principal;
+import org.bouncycastle.asn1.x500.X500Name;
+import org.bouncycastle.asn1.x509.AlgorithmIdentifier;
+import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
-import org.bouncycastle.x509.X509V3CertificateGenerator;
+import org.bouncycastle.operator.ContentSigner;
+import org.bouncycastle.operator.DefaultDigestAlgorithmIdentifierFinder;
+import org.bouncycastle.operator.DefaultSignatureAlgorithmIdentifierFinder;
+import org.bouncycastle.operator.OperatorCreationException;
+import org.bouncycastle.operator.bc.BcContentSignerBuilder;
+import org.bouncycastle.operator.bc.BcRSAContentSignerBuilder;
+import org.bouncycastle.cert.X509CertificateHolder;
+import org.bouncycastle.cert.X509v3CertificateBuilder;
+import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
+import org.bouncycastle.crypto.params.RSAKeyParameters;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -37,6 +47,7 @@ import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
+import java.io.IOException;
import java.math.BigInteger;
import java.net.InetAddress;
import java.security.InvalidKeyException;
@@ -49,14 +60,13 @@ import java.security.SecureRandom;
import java.security.Security;
import java.security.SignatureException;
import java.security.cert.Certificate;
-import java.security.cert.CertificateEncodingException;
+import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
+import java.security.interfaces.RSAPrivateKey;
+import java.security.interfaces.RSAPublicKey;
import java.util.Date;
-import static org.apache.ambari.logsearch.LogSearch.LOGSEARCH_CERT_DEFAULT_FOLDER;
-import static org.apache.ambari.logsearch.LogSearch.LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD;
-
public class SSLUtil {
private static final Logger LOG = LoggerFactory.getLogger(SSLUtil.class);
@@ -74,6 +84,18 @@ public class SSLUtil {
private static final String TRUSTSTORE_PASSWORD_FILE = "ts_pass.txt";
private static final String CREDENTIAL_STORE_PROVIDER_PATH = "hadoop.security.credential.provider.path";
+ private static final String LOGSEARCH_CERT_FOLDER_LOCATION = "logsearch.cert.folder.location";
+ private static final String LOGSEARCH_CERT_ALGORITHM = "logsearch.cert.algorithm";
+
+ private static final String LOGSEARCH_CERT_FILENAME = "logsearch.crt";
+ private static final String LOGSEARCH_KEYSTORE_FILENAME = "logsearch.jks";
+ private static final String LOGSEARCH_KEYSTORE_PRIVATE_KEY = "logsearch.private.key";
+ private static final String LOGSEARCH_KEYSTORE_PUBLIC_KEY = "logsearch.public.key";
+ private static final String LOGSEARCH_CERT_DEFAULT_ALGORITHM = "sha256WithRSA";
+
+ private static final String LOGSEARCH_CERT_DEFAULT_FOLDER = "/etc/ambari-logsearch-portal/conf/keys";
+ private static final String LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD = "bigdata";
+
private SSLUtil() {
throw new UnsupportedOperationException();
}
@@ -111,8 +133,6 @@ public class SSLUtil {
}
public static SslContextFactory getSslContextFactory() {
- setPasswordIfSysPropIsEmpty(KEYSTORE_PASSWORD_ARG, KEYSTORE_PASSWORD_PROPERTY_NAME, KEYSTORE_PASSWORD_FILE);
- setPasswordIfSysPropIsEmpty(TRUSTSTORE_PASSWORD_ARG, TRUSTSTORE_PASSWORD_PROPERTY_NAME, TRUSTSTORE_PASSWORD_FILE);
SslContextFactory sslContextFactory = new SslContextFactory();
sslContextFactory.setKeyStorePath(getKeyStoreLocation());
sslContextFactory.setKeyStorePassword(getKeyStorePassword());
@@ -171,7 +191,7 @@ public class SSLUtil {
char[] passwordChars = config.getPassword(propertyName);
return (ArrayUtils.isNotEmpty(passwordChars)) ? new String(passwordChars) : null;
} catch (Exception e) {
- LOG.warn(String.format("Could not load password %s from credential store, using default password", propertyName));
+ LOG.warn(String.format("Could not load password %s from credential store, using default password", propertyName), e);
return null;
}
}
@@ -193,7 +213,7 @@ public class SSLUtil {
/**
* Put private key into in-memory keystore and write it to a file (JKS file)
*/
- public static void setKeyAndCertInKeystore(X509Certificate cert, KeyPair keyPair, KeyStore keyStore, String keyStoreLocation, char[] password)
+ private static void setKeyAndCertInKeystore(X509Certificate cert, KeyPair keyPair, KeyStore keyStore, String keyStoreLocation, char[] password)
throws Exception {
Certificate[] certChain = new Certificate[1];
certChain[0] = cert;
@@ -201,7 +221,7 @@ public class SSLUtil {
keyStore.setKeyEntry("logsearch.alias", keyPair.getPrivate(), password, certChain);
keyStore.store(fos, password);
} catch (Exception e) {
- LOG.error("Could not write certificate to Keystore");
+ LOG.error("Could not write certificate to Keystore", e);
throw e;
}
}
@@ -209,7 +229,7 @@ public class SSLUtil {
/**
* Create in-memory keypair with bouncy castle
*/
- public static KeyPair createKeyPair(String encryptionType, int byteCount)
+ private static KeyPair createKeyPair(String encryptionType, int byteCount)
throws NoSuchProviderException, NoSuchAlgorithmException {
Security.addProvider(new BouncyCastleProvider());
KeyPairGenerator keyPairGenerator = createKeyPairGenerator(encryptionType, byteCount);
@@ -219,7 +239,7 @@ public class SSLUtil {
/**
* Generate X509 certificate if it does not exist
*/
- public static X509Certificate generateCertificate(String certificateLocation, KeyPair keyPair, String algorithm) throws Exception {
+ private static X509Certificate generateCertificate(String certificateLocation, KeyPair keyPair, String algorithm) throws Exception {
try {
File certFile = new File(certificateLocation);
if (certFile.exists()) {
@@ -227,44 +247,65 @@ public class SSLUtil {
return getCertFile(certificateLocation);
} else {
Security.addProvider(new BouncyCastleProvider());
- X509Certificate cert = SSLUtil.createCert(keyPair, algorithm, InetAddress.getLocalHost().getCanonicalHostName());
+ X509Certificate cert = createCert(keyPair, algorithm, InetAddress.getLocalHost().getCanonicalHostName());
FileUtils.writeByteArrayToFile(certFile, cert.getEncoded());
return cert;
}
} catch (Exception e) {
- LOG.error("Could not create certificate.");
+ LOG.error("Could not create certificate.", e);
throw e;
}
}
- private static void setPasswordIfSysPropIsEmpty(String pwdArg, String propertyName, String fileName) {
- if (StringUtils.isEmpty(System.getProperty(pwdArg))) {
+ private static void ensureStorePassword(String locationArg, String pwdArg, String propertyName, String fileName) {
+ if (StringUtils.isNotEmpty(System.getProperty(locationArg)) && StringUtils.isEmpty(System.getProperty(pwdArg))) {
String password = getPassword(propertyName, fileName);
System.setProperty(pwdArg, password);
}
}
+
+ public static void ensureStorePasswords() {
+ ensureStorePassword(KEYSTORE_LOCATION_ARG, KEYSTORE_PASSWORD_ARG, KEYSTORE_PASSWORD_PROPERTY_NAME, KEYSTORE_PASSWORD_FILE);
+ ensureStorePassword(TRUSTSTORE_LOCATION_ARG, TRUSTSTORE_PASSWORD_ARG, TRUSTSTORE_PASSWORD_PROPERTY_NAME, TRUSTSTORE_PASSWORD_FILE);
+ }
private static X509Certificate getCertFile(String location) throws Exception {
try (FileInputStream fos = new FileInputStream(location)) {
CertificateFactory factory = CertificateFactory.getInstance("X.509");
return (X509Certificate) factory.generateCertificate(fos);
} catch (Exception e) {
- LOG.error("Cannot read cert file. ('{}')", location);
+ LOG.error("Cannot read cert file. ('" + location + "')", e);
throw e;
}
}
private static X509Certificate createCert(KeyPair keyPair, String signatureAlgoritm, String domainName)
- throws CertificateEncodingException, NoSuchAlgorithmException, InvalidKeyException, SignatureException {
- X509V3CertificateGenerator v3CertGen = new X509V3CertificateGenerator();
- v3CertGen.setSerialNumber(BigInteger.valueOf(Math.abs(new SecureRandom().nextInt())));
- v3CertGen.setIssuerDN(new X509Principal("CN=" + domainName + ", OU=None, O=None L=None, C=None"));
- v3CertGen.setNotBefore(new Date(System.currentTimeMillis() - 1000L * 60 * 60 * 24 * 30));
- v3CertGen.setNotAfter(new Date(System.currentTimeMillis() + (1000L * 60 * 60 * 24 * 365*10)));
- v3CertGen.setSubjectDN(new X509Principal("CN=" + domainName + ", OU=None, O=None L=None, C=None"));
- v3CertGen.setPublicKey(keyPair.getPublic());
- v3CertGen.setSignatureAlgorithm(signatureAlgoritm);
- return v3CertGen.generate(keyPair.getPrivate());
+ throws NoSuchAlgorithmException, InvalidKeyException, SignatureException, OperatorCreationException, CertificateException, IOException {
+
+ RSAPublicKey rsaPublicKey = (RSAPublicKey) keyPair.getPublic();
+ RSAPrivateKey rsaPrivateKey = (RSAPrivateKey) keyPair.getPrivate();
+
+ AlgorithmIdentifier sigAlgId = new DefaultSignatureAlgorithmIdentifierFinder().find(signatureAlgoritm);
+ AlgorithmIdentifier digAlgId = new DefaultDigestAlgorithmIdentifierFinder().find(sigAlgId);
+ BcContentSignerBuilder sigGen = new BcRSAContentSignerBuilder(sigAlgId, digAlgId);
+
+ SubjectPublicKeyInfo pubKey = new SubjectPublicKeyInfo(sigAlgId, rsaPublicKey.getEncoded());
+
+ X509v3CertificateBuilder v3CertBuilder = new X509v3CertificateBuilder(
+ new X500Name("CN=" + domainName + ", OU=None, O=None L=None, C=None"),
+ BigInteger.valueOf(Math.abs(new SecureRandom().nextInt())),
+ new Date(System.currentTimeMillis() - 1000L * 60 * 60 * 24 * 30),
+ new Date(System.currentTimeMillis() + (1000L * 60 * 60 * 24 * 365*10)),
+ new X500Name("CN=" + domainName + ", OU=None, O=None L=None, C=None"),
+ pubKey);
+
+ RSAKeyParameters keyParams = new RSAKeyParameters(true, rsaPrivateKey.getPrivateExponent(), rsaPrivateKey.getModulus());
+ ContentSigner contentSigner = sigGen.build(keyParams);
+
+ X509CertificateHolder certificateHolder = v3CertBuilder.build(contentSigner);
+
+ JcaX509CertificateConverter certConverter = new JcaX509CertificateConverter();
+ return certConverter.getCertificate(certificateHolder);
}
private static KeyPairGenerator createKeyPairGenerator(String algorithmIdentifier, int bitCount)
@@ -274,4 +315,46 @@ public class SSLUtil {
return kpg;
}
+ /**
+ * Create keystore with keys and certificate (only if the keystore does not exist or if you have no permissions on the keystore file)
+ */
+ public static void loadKeystore() {
+ try {
+ String certFolder = PropertiesHelper.getProperty(LOGSEARCH_CERT_FOLDER_LOCATION, LOGSEARCH_CERT_DEFAULT_FOLDER);
+ String certAlgorithm = PropertiesHelper.getProperty(LOGSEARCH_CERT_ALGORITHM, LOGSEARCH_CERT_DEFAULT_ALGORITHM);
+ String certLocation = String.format("%s/%s", LOGSEARCH_CERT_DEFAULT_FOLDER, LOGSEARCH_CERT_FILENAME);
+ String keyStoreLocation = StringUtils.isNotEmpty(getKeyStoreLocation()) ? getKeyStoreLocation()
+ : String.format("%s/%s", LOGSEARCH_CERT_DEFAULT_FOLDER, LOGSEARCH_KEYSTORE_FILENAME);
+ char[] password = StringUtils.isNotEmpty(getKeyStorePassword()) ?
+ getKeyStorePassword().toCharArray() : LOGSEARCH_KEYSTORE_DEFAULT_PASSWORD.toCharArray();
+ boolean keyStoreFileExists = new File(keyStoreLocation).exists();
+ if (!keyStoreFileExists) {
+ FileUtil.createDirectory(certFolder);
+ LOG.warn("Keystore file ('{}') does not exist, creating new one. " +
+ "If the file exists, make sure you have proper permissions on that.", keyStoreLocation);
+ if (isKeyStoreSpecified() && !"JKS".equalsIgnoreCase(getKeyStoreType())) {
+ throw new RuntimeException(String.format("Keystore does not exist. Only JKS keystore can be auto generated. (%s)", keyStoreLocation));
+ }
+ LOG.info("SSL keystore is not specified. Generating it with certificate ... (using default format: JKS)");
+ Security.addProvider(new BouncyCastleProvider());
+ KeyPair keyPair = createKeyPair("RSA", 2048);
+ File privateKeyFile = new File(String.format("%s/%s", certFolder, LOGSEARCH_KEYSTORE_PRIVATE_KEY));
+ if (!privateKeyFile.exists()) {
+ FileUtils.writeByteArrayToFile(privateKeyFile, keyPair.getPrivate().getEncoded());
+ }
+ File file = new File(String.format("%s/%s", certFolder, LOGSEARCH_KEYSTORE_PUBLIC_KEY));
+ if (!file.exists()) {
+ FileUtils.writeByteArrayToFile(file, keyPair.getPublic().getEncoded());
+ }
+ X509Certificate cert = generateCertificate(certLocation, keyPair, certAlgorithm);
+ KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
+ keyStore.load(null, password);
+ setKeyAndCertInKeystore(cert, keyPair, keyStore, keyStoreLocation, password);
+ FileUtil.setPermissionOnDirectory(certFolder, "600");
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
}
[20/30] ambari git commit: AMBARI-19752. Hive View 2.0 execute button
needs some feedback (pallavkul)
Posted by nc...@apache.org.
AMBARI-19752. Hive View 2.0 execute button needs some feedback (pallavkul)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/abcd0637
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/abcd0637
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/abcd0637
Branch: refs/heads/branch-dev-patch-upgrade
Commit: abcd0637c6f3328d0a6345e0573c30615cdcde83
Parents: 985daff
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jan 30 14:27:44 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jan 30 14:29:15 2017 +0530
----------------------------------------------------------------------
contrib/views/hive20/src/main/resources/ui/app/services/jobs.js | 4 ----
contrib/views/hive20/src/main/resources/ui/app/styles/app.scss | 4 ++++
.../hive20/src/main/resources/ui/app/templates/queries/query.hbs | 2 +-
3 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/abcd0637/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
index ff54152..5db22d0 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
@@ -27,16 +27,12 @@ export default Ember.Service.extend({
},
waitForJobToComplete(jobId, after, fetchDummyResult = true) {
- console.log()
return new Ember.RSVP.Promise((resolve, reject) => {
Ember.run.later(() => {
this.get('store').findRecord('job', jobId, { reload: true })
.then((job) => {
let status = job.get('status').toLowerCase();
if (status === 'succeeded') {
- if (fetchDummyResult) {
- this._fetchDummyResult(jobId);
- }
resolve(status);
} else if (status === 'error') {
reject(status)
http://git-wip-us.apache.org/repos/asf/ambari/blob/abcd0637/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index e178222..17abb65 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -41,6 +41,10 @@
height: 100%;
}
+.fa-1-5{
+ font-size: 1.5em;
+}
+
.fa-2 {
font-size: 2em;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/abcd0637/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
index 9e9e542..7f91d6c 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/queries/query.hbs
@@ -31,7 +31,7 @@
<button class="btn btn-success" {{action "executeQuery" }}>{{fa-icon "check"}} Execute</button>
<button class="btn btn-default" {{action "openWorksheetModal" }}>{{fa-icon "save"}} Save As</button>
{{#if isQueryRunning}}
- <img src="http://www.bba-reman.com/images/fbloader.gif" height="22" width="32" />
+ {{fa-icon "spinner fa-1-5" spin=true}}
{{/if}}
</div>
</div>
[18/30] ambari git commit: AMBARI-19750. Hive2 view - Clicking on
browse databases does not work (pallavkul)
Posted by nc...@apache.org.
AMBARI-19750. Hive2 view - Clicking on browse databases does not work (pallavkul)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ea1fbaa0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ea1fbaa0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ea1fbaa0
Branch: refs/heads/branch-dev-patch-upgrade
Commit: ea1fbaa020907c7a433275447043b509f8868dcd
Parents: c977113
Author: pallavkul <pa...@gmail.com>
Authored: Mon Jan 30 14:21:16 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Mon Jan 30 14:21:16 2017 +0530
----------------------------------------------------------------------
.../components/multiple-database-search-bar.js | 31 +++++++++++++++++++-
.../components/multiple-database-search-bar.hbs | 11 ++++---
2 files changed, 35 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ea1fbaa0/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js b/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js
index 817a826..c27f80f 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/multiple-database-search-bar.js
@@ -66,6 +66,12 @@ export default Ember.Component.extend({
return selecteddblist;
}),
+ focusComesFromOutside(e){
+ let blurredEl = e.relatedTarget;
+ return !blurredEl || !blurredEl.classList.contains('ember-power-select-search-input');
+ },
+
+
actions: {
createOnEnter(select, e) {
if (e.keyCode === 13 && select.isOpen &&
@@ -79,10 +85,33 @@ export default Ember.Component.extend({
}
},
+ handleFocus(select, e) {
+ if (this.focusComesFromOutside(e)) {
+ select.actions.open();
+ this.$('.browse').addClass('open');
+ }
+
+ },
+
+ handleBlur() {
+ //console.log('handleBlur');
+ },
+
updateTables(){
this.sendAction('changeDbHandler', this.get('selectedDbs'));
- }
+ },
+
+ browse(){
+ if(this.$('.browse').hasClass('open')){
+ this.$('.browse').removeClass('open');
+ this.$('.multiple-db-select input').focusout();
+ } else {
+ this.$('.browse').addClass('open');
+ this.$('.multiple-db-select input').focus();
+ }
+
+ }
}
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/ea1fbaa0/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs
index cc102b4..e620347 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/multiple-database-search-bar.hbs
@@ -22,22 +22,21 @@
</div>
<div class="col-md-9">
<div class="input-group-lg">
-
{{#power-select-multiple
+ class="multiple-db-select"
placeholder="Search databases"
options=allDbs
selected=selectedMultiDb
onchange=(pipe-action (action (mut selectedDbs)) (action "updateTables"))
+ onfocus=(action "handleFocus")
+ onblur=(action "handleBlur")
onkeydown=(action "createOnEnter")
as |number|}}
{{fa-icon "database"}} {{number}}
{{/power-select-multiple}}
-
- <span class="input-group-btn" style="top: 0;right: 130px;position: absolute;">
- <button type="button" class="btn btn-default">{{fa-icon "folder"}} Browse <span class="caret"></span></button>
+ <span class="input-group-btn browse" style="top: 0;right: 130px;position: absolute;">
+ <button type="button" class="btn btn-default" {{action "browse" }}>{{fa-icon "folder"}} Browse <span class="caret"></span></button>
</span>
-
-
</div>
</div>
[11/30] ambari git commit: AMBARI-19680: On Capacity Scheduler view,
click * button for Node label Access the 2nd time should de-select
all node labels, but it doesn't. (sangeetar)
Posted by nc...@apache.org.
AMBARI-19680: On Capacity Scheduler view, click * button for Node label Access the 2nd time should de-select all node labels, but it doesn't. (sangeetar)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c2907860
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c2907860
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c2907860
Branch: refs/heads/branch-dev-patch-upgrade
Commit: c2907860e4fea65d272a680509ce656ac6e1f4f0
Parents: 5040391
Author: Sangeeta Ravindran <sa...@apache.org>
Authored: Sat Jan 28 00:23:56 2017 -0800
Committer: Sangeeta Ravindran <sa...@apache.org>
Committed: Sat Jan 28 00:23:56 2017 -0800
----------------------------------------------------------------------
.../capacity-scheduler/src/main/resources/ui/app/models/queue.js | 3 +++
1 file changed, 3 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2907860/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js
----------------------------------------------------------------------
diff --git a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js
index df0c684..9198aa5 100644
--- a/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js
+++ b/contrib/views/capacity-scheduler/src/main/resources/ui/app/models/queue.js
@@ -106,6 +106,9 @@ App.Queue = DS.Model.extend({
}.bind(this));
this.notifyPropertyChange('labels');
}
+ else {
+ this.get('labels').clear();
+ }
}
return this.get('_accessAllLabels');
[10/30] ambari git commit: AMBARI-19762. Kerberizing PERF cluster
fails since cannot find principal name config in dummy.py. (Madhuvanthi
Radhakrishnan via swagle)
Posted by nc...@apache.org.
AMBARI-19762. Kerberizing PERF cluster fails since cannot find principal name config in dummy.py. (Madhuvanthi Radhakrishnan via swagle)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/50403914
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/50403914
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/50403914
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 50403914c7a092f6e66726b3bf2331be13625f71
Parents: 0e8c966
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Fri Jan 27 18:42:47 2017 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Fri Jan 27 18:42:47 2017 -0800
----------------------------------------------------------------------
.../PERF/1.0/services/FAKEHDFS/package/scripts/zkfc_slave.py | 5 +++++
.../resources/stacks/PERF/1.0/services/FAKEYARN/metainfo.xml | 4 ++--
2 files changed, 7 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/50403914/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/scripts/zkfc_slave.py
index 66e36ad..45f6348 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/scripts/zkfc_slave.py
@@ -33,6 +33,11 @@ class ZkfcSlave(Dummy):
def __init__(self):
super(ZkfcSlave, self).__init__()
self.component_name = "FAKEZKFC"
+ self.principal_conf_name = "hadoop-env"
+ self.principal_name = "hdfs_principal_name"
+ self.keytab_conf_name = "hadoop-env"
+ self.keytab_name = "hdfs_user_keytab"
+
if __name__ == "__main__":
ZkfcSlave().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/50403914/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/metainfo.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/metainfo.xml
index 0f926dd..e07f3a7 100644
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEYARN/metainfo.xml
@@ -229,9 +229,9 @@
</service>
<service>
- <name>FAKEFAKEMAPREDUCE2</name>
+ <name>FAKEMAPREDUCE2</name>
<version>2.7.1.2.5</version>
- <displayName>FAKEFAKEMapReduce2</displayName>
+ <displayName>FAKEMapReduce2</displayName>
<comment>Apache Hadoop NextGen MapReduce (FAKEYARN)</comment>
<configuration-dir>configuration-mapred</configuration-dir>
[25/30] ambari git commit: AMBARI-19774. Undo Add Node feature needed
for workflow designer similar to Undo Delete(Padma Priya N via gauravn7)
Posted by nc...@apache.org.
AMBARI-19774. Undo Add Node feature needed for workflow designer similar to Undo Delete(Padma Priya N via gauravn7)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ff34b053
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ff34b053
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ff34b053
Branch: refs/heads/branch-dev-patch-upgrade
Commit: ff34b053d047889d261524f4b676b915a9176934
Parents: 589f567
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Mon Jan 30 17:36:57 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Mon Jan 30 17:37:49 2017 +0530
----------------------------------------------------------------------
.../src/main/resources/ui/app/components/flow-designer.js | 5 +++--
.../resources/ui/app/templates/components/flow-designer.hbs | 8 +++++---
2 files changed, 8 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff34b053/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
index 1292832..f863656 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/components/flow-designer.js
@@ -616,7 +616,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
}
this.rerender();
this.doValidation();
- this.showUndo('node');
+ this.showUndo('nodeDeleted');
},
addWorkflowBranch(node){
this.createSnapshot();
@@ -846,6 +846,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
this.rerender();
this.doValidation();
this.scrollToNewPosition();
+ this.showUndo('nodeAdded');
},
nameChanged(){
this.doValidation();
@@ -1095,7 +1096,7 @@ export default Ember.Component.extend(FindNodeMixin, Validations, {
this.persistWorkInProgress();
},
- undoDelete () {
+ undo () {
var workflowImporter = WorkflowJsonImporter.create({});
var workflow = workflowImporter.importWorkflow(this.get('workflowSnapshot'));
this.resetDesigner();
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff34b053/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
index 2b8844e..df0a9ba 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/templates/components/flow-designer.hbs
@@ -141,10 +141,12 @@
{{designer-errors errors=errors validationErrors=validationErrors}}
{{#if undoAvailable}}
<div id="alert"class="alert alert-warning" role="alert">
- {{#if (eq undoType 'node')}}
- <label>Node <i>{{deletedNode.name}}</i> deleted <span class="undo" {{action 'undoDelete'}}><u>Undo</u></span></label>
+ {{#if (eq undoType 'nodeDeleted')}}
+ <label>Node deleted <span class="undo" {{action 'undo'}}><u>Undo</u></span></label>
+ {{else if (eq undoType 'nodeAdded')}}
+ <label>Node added <span class="undo" {{action 'undo'}}><u>Undo</u></span></label>
{{else}}
- <label>Transition deleted <span class="undo" {{action 'undoDelete'}}><u>Undo</u></span></label>
+ <label>Transition deleted <span class="undo" {{action 'undo'}}><u>Undo</u></span></label>
{{/if}}
</div>
{{/if}}
[04/30] ambari git commit: Revert "AMBARI-19741. Ambari Server Unit
Test failure on branch-2.5/trunk for testUpdateConfigForceSecurityEnabled
(echekanskiy via dlysnichenko)"
Posted by nc...@apache.org.
Revert "AMBARI-19741. Ambari Server Unit Test failure on branch-2.5/trunk for testUpdateConfigForceSecurityEnabled (echekanskiy via dlysnichenko)"
This reverts commit cc30798a6f1428fbe2a2dd6c64cac2c128cf9a23.
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/65eb8867
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/65eb8867
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/65eb8867
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 65eb886720c2ccff344fe99928fabc23bcc48738
Parents: 23522a2
Author: Robert Levas <rl...@hortonworks.com>
Authored: Fri Jan 27 15:09:57 2017 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Fri Jan 27 15:09:57 2017 -0500
----------------------------------------------------------------------
.../UpdateKerberosConfigsServerActionTest.java | 14 ++++----------
1 file changed, 4 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/65eb8867/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java
index c8ebb63..e756491 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/UpdateKerberosConfigsServerActionTest.java
@@ -22,7 +22,7 @@ import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
import java.io.File;
import java.util.Collection;
@@ -43,6 +43,7 @@ import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
+
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
@@ -164,15 +165,8 @@ public class UpdateKerberosConfigsServerActionTest extends EasyMockSupport{
action.setExecutionCommand(executionCommand);
action.execute(null);
- assertTrue(configTypes.getValues().contains("cluster-env"));
- boolean containsSecurityEnabled = false;
- for(Map<String, String> properties: configUpdates.getValues()) {
- if(properties.containsKey("security_enabled")) {
- containsSecurityEnabled = true;
- break;
- }
- }
- assertTrue(containsSecurityEnabled);
+ assertEquals(configTypes.getValue(), "cluster-env");
+ assertEquals(configUpdates.getValue().get("security_enabled"), "false");
verifyAll();
}
[12/30] ambari git commit: AMBARI-19726. Post user creation hook is
passed the hdfs user from the hadoop-env config type. (Laszlo Puskas via
stoader)
Posted by nc...@apache.org.
AMBARI-19726. Post user creation hook is passed the hdfs user from the hadoop-env config type. (Laszlo Puskas via stoader)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/86fbb381
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/86fbb381
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/86fbb381
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 86fbb38108e2d9b87f22b1920cada3a655677285
Parents: c290786
Author: Laszlo Puskas <lp...@hortonworks.com>
Authored: Sat Jan 28 18:27:31 2017 +0100
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Sat Jan 28 18:27:31 2017 +0100
----------------------------------------------------------------------
.../server/hooks/users/UserHookParams.java | 3 ++-
.../server/hooks/users/UserHookService.java | 23 ++++++++++++++------
.../users/CsvFilePersisterService.java | 6 ++---
.../users/PostUserCreationHookServerAction.java | 8 ++++++-
.../scripts/post-user-creation-hook.sh | 9 ++++++--
.../server/hooks/users/UserHookServiceTest.java | 9 ++++++++
.../PostUserCreationHookServerActionTest.java | 3 ++-
7 files changed, 45 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/86fbb381/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookParams.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookParams.java b/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookParams.java
index 6970dcc..4b1b5aa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookParams.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookParams.java
@@ -34,7 +34,8 @@ public enum UserHookParams {
// identify security related values
CLUSTER_SECURITY_TYPE("cluster-security-type"),
CMD_HDFS_PRINCIPAL("cmd-hdfs-principal"),
- CMD_HDFS_KEYTAB("cmd-hdfs-keytab");
+ CMD_HDFS_KEYTAB("cmd-hdfs-keytab"),
+ CMD_HDFS_USER("cmd-hdfs-user");
private String param;
http://git-wip-us.apache.org/repos/asf/ambari/blob/86fbb381/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookService.java b/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookService.java
index c4ff1e4..69463ab 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/hooks/users/UserHookService.java
@@ -170,8 +170,10 @@ public class UserHookService implements HookService {
commandParams.put(UserHookParams.CMD_HDFS_KEYTAB.param(), clusterData.getKeytab());
commandParams.put(UserHookParams.CMD_HDFS_PRINCIPAL.param(), clusterData.getPrincipal());
+ commandParams.put(UserHookParams.CMD_HDFS_USER.param(), clusterData.getHdfsUser());
commandParams.put(UserHookParams.CMD_INPUT_FILE.param(), generateInputFileName());
+
commandParams.put(UserHookParams.PAYLOAD.param(), objectMapper.writeValueAsString(context.getUserGroups()));
return commandParams;
@@ -228,15 +230,16 @@ public class UserHookService implements HookService {
break;
}
- return new ClusterData(cluster.getClusterName(), cluster.getClusterId(), cluster.getSecurityType().name(), principal, keyTab);
- }
- private void getSecurityData(Configuration configuraiton) {
- //principal
+ return new ClusterData(cluster.getClusterName(), cluster.getClusterId(), cluster.getSecurityType().name(), principal, keyTab, getHdfsUser(cluster));
+ }
- //keytab
+ private String getHdfsUser(Cluster cluster) {
+ String hdfsUser = cluster.getDesiredConfigByType("hadoop-env").getProperties().get("hdfs_user");
+ return hdfsUser;
}
+
/**
* Local representation of cluster data.
*/
@@ -247,12 +250,15 @@ public class UserHookService implements HookService {
private String principal;
private String keytab;
- public ClusterData(String clusterName, Long clusterId, String securityType, String principal, String keytab) {
+ private String hdfsUser;
+
+ public ClusterData(String clusterName, Long clusterId, String securityType, String principal, String keytab, String hdfsUser) {
this.clusterName = clusterName;
this.clusterId = clusterId;
this.securityType = securityType;
this.principal = principal;
this.keytab = keytab;
+ this.hdfsUser = hdfsUser;
}
public String getClusterName() {
@@ -274,6 +280,9 @@ public class UserHookService implements HookService {
public String getKeytab() {
return keytab;
}
- }
+ public String getHdfsUser() {
+ return hdfsUser;
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/86fbb381/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java
index fe6bf35..b78a127 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/CsvFilePersisterService.java
@@ -18,15 +18,12 @@
package org.apache.ambari.server.serveraction.users;
-import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
-import java.nio.file.attribute.PosixFilePermissions;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
@@ -72,7 +69,8 @@ public class CsvFilePersisterService implements CollectionPersisterService<Strin
@Inject
public void init() throws IOException {
- Path csv = Files.createFile(Paths.get(csvFile), PosixFilePermissions.asFileAttribute(getCsvPermissions()));
+ Path csv = Files.createFile(Paths.get(csvFile));
+ Files.setPosixFilePermissions(Paths.get(csvFile), getCsvPermissions());
fileWriter = new FileWriter(csv.toFile());
csvPrinter = new CSVPrinter(fileWriter, CSVFormat.DEFAULT.withRecordSeparator(NEW_LINE_SEPARATOR));
http://git-wip-us.apache.org/repos/asf/ambari/blob/86fbb381/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerAction.java
index 45bc0eb..2d6d38d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerAction.java
@@ -116,7 +116,8 @@ public class PostUserCreationHookServerAction extends AbstractServerAction {
params.get(UserHookParams.CMD_INPUT_FILE.param()),
params.get(UserHookParams.CLUSTER_SECURITY_TYPE.param()),
params.get(UserHookParams.CMD_HDFS_PRINCIPAL.param()),
- params.get(UserHookParams.CMD_HDFS_KEYTAB.param())
+ params.get(UserHookParams.CMD_HDFS_KEYTAB.param()),
+ params.get(UserHookParams.CMD_HDFS_USER.param())
};
LOGGER.debug("Server action command to be executed: {}", cmdArray);
return cmdArray;
@@ -149,6 +150,11 @@ public class PostUserCreationHookServerAction extends AbstractServerAction {
throw new IllegalArgumentException("Missing command parameter: [" + UserHookParams.CLUSTER_SECURITY_TYPE.param() + "]");
}
+ if (!commandParams.containsKey(UserHookParams.CMD_HDFS_USER.param())) {
+ LOGGER.error("Missing command parameter: {}; Failing the server action.", UserHookParams.CMD_HDFS_USER.param());
+ throw new IllegalArgumentException("Missing command parameter: [" + UserHookParams.CMD_HDFS_USER.param() + "]");
+ }
+
LOGGER.info("Command parameter validation passed.");
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/86fbb381/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh b/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh
index ee8d2d1..91511a0 100755
--- a/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh
+++ b/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh
@@ -30,6 +30,8 @@ CSV_FILE="$1"
SECURITY_TYPE=$2
: "${SECURITY_TYPE:?"Missing security type input for the post-user creation hook"}"
+# the last argument is the user with dfs administrator privileges
+HDFS_USR=${@: -1}
}
@@ -37,8 +39,8 @@ SECURITY_TYPE=$2
ambari_sudo(){
ARG_STR="$1"
-CMD_STR="/var/lib/ambari-server/ambari-sudo.sh su hdfs -l -s /bin/bash -c '$ARG_STR'"
-
+CMD_STR="/var/lib/ambari-server/ambari-sudo.sh su '$HDFS_USR' -l -s /bin/bash -c '$ARG_STR'"
+echo "Executing command: [ $CMD_STR ]"
eval "$CMD_STR"
}
@@ -101,6 +103,9 @@ do
EOF
done <"$CSV_FILE"
+# Setting read permissions on the generated file
+chmod 644 $JSON_INPUT
+
# deleting the last line
sed -i '$ d' "$JSON_INPUT"
http://git-wip-us.apache.org/repos/asf/ambari/blob/86fbb381/ambari-server/src/test/java/org/apache/ambari/server/hooks/users/UserHookServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/hooks/users/UserHookServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/hooks/users/UserHookServiceTest.java
index 834f930..b642d35 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/hooks/users/UserHookServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/hooks/users/UserHookServiceTest.java
@@ -37,6 +37,7 @@ import org.apache.ambari.server.hooks.AmbariEventFactory;
import org.apache.ambari.server.hooks.HookContext;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.SecurityType;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostServerActionEvent;
import org.codehaus.jackson.map.ObjectMapper;
@@ -90,6 +91,9 @@ public class UserHookServiceTest extends EasyMockSupport {
@Mock
private Stage stageMock;
+ @Mock
+ private Config configMock;
+
@TestSubject
private UserHookService hookService = new UserHookService();
@@ -193,9 +197,14 @@ public class UserHookServiceTest extends EasyMockSupport {
Map<String, Cluster> clsMap = new HashMap<>();
clsMap.put("test-cluster", clusterMock);
+ Map<String, String> configMap = new HashMap<>();
+ configMap.put("hdfs_user", "hdfs-test-user");
+
EasyMock.expect(clusterMock.getClusterId()).andReturn(1l);
EasyMock.expect(clusterMock.getClusterName()).andReturn("test-cluster");
EasyMock.expect(clusterMock.getSecurityType()).andReturn(SecurityType.NONE).times(3);
+ EasyMock.expect(clusterMock.getDesiredConfigByType("hadoop-env")).andReturn(configMock);
+ EasyMock.expect(configMock.getProperties()).andReturn(configMap);
EasyMock.expect(actionManagerMock.getNextRequestId()).andReturn(1l);
http://git-wip-us.apache.org/repos/asf/ambari/blob/86fbb381/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerActionTest.java
index a4d50ef..6d6e42c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/users/PostUserCreationHookServerActionTest.java
@@ -115,6 +115,7 @@ public class PostUserCreationHookServerActionTest extends EasyMockSupport {
commandParams.put(UserHookParams.CMD_TIME_FRAME.param(), "1000");
commandParams.put(UserHookParams.CMD_INPUT_FILE.param(), "/test/user_data.csv");
commandParams.put(UserHookParams.CLUSTER_SECURITY_TYPE.param(), SecurityType.KERBEROS.name());
+ commandParams.put(UserHookParams.CMD_HDFS_USER.param(), "test-hdfs-user");
EasyMock.expect(executionCommand.getCommandParams()).andReturn(commandParams);
EasyMock.expect(objectMapperMock.readValue(payloadJson, Map.class)).andReturn(payload);
@@ -139,7 +140,7 @@ public class PostUserCreationHookServerActionTest extends EasyMockSupport {
String[] commandArray = commandCapture.getValue();
Assert.assertNotNull("The command to be executed must not be null!", commandArray);
- Assert.assertEquals("The command argument array length is not as expected!", 5, commandArray.length);
+ Assert.assertEquals("The command argument array length is not as expected!", 6, commandArray.length);
Assert.assertEquals("The command script is not as expected", "/hookfolder/hook.name", commandArray[0]);
}
[22/30] ambari git commit: AMBARI-19666 : Fixed Hive view losing one
row every 100 rows in result set. (nitirajrathore)
Posted by nc...@apache.org.
AMBARI-19666 : Fixed Hive view losing one row every 100 rows in result set. (nitirajrathore)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9f5a8bf6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9f5a8bf6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9f5a8bf6
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9f5a8bf6a7c670845adfa07369e9c56ff68e9798
Parents: 37b4142
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Mon Jan 30 14:33:43 2017 +0530
Committer: Nitiraj Singh Rathore <ni...@gmail.com>
Committed: Mon Jan 30 14:34:26 2017 +0530
----------------------------------------------------------------------
.../org/apache/ambari/view/hive2/actor/ResultSetIterator.java | 3 ++-
.../org/apache/ambari/view/hive20/actor/ResultSetIterator.java | 3 ++-
2 files changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/9f5a8bf6/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/actor/ResultSetIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/actor/ResultSetIterator.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/actor/ResultSetIterator.java
index 1052bd4..5bde12d 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/actor/ResultSetIterator.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/actor/ResultSetIterator.java
@@ -116,7 +116,8 @@ public class ResultSetIterator extends HiveActor {
}
int index = 0;
try {
- while (resultSet.next() && index < batchSize) {
+ // check batchSize first because resultSet.next() fetches the new row as well before returning true/false.
+ while (index < batchSize && resultSet.next()) {
index++;
rows.add(getRowFromResultSet(resultSet));
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/9f5a8bf6/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java
index 4b4a407..5da5ee1 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/actor/ResultSetIterator.java
@@ -115,7 +115,8 @@ public class ResultSetIterator extends HiveActor {
}
int index = 0;
try {
- while (resultSet.next() && index < batchSize) {
+ // check batchSize first because resultSet.next() fetches the new row as well before returning true/false.
+ while (index < batchSize && resultSet.next()) {
index++;
rows.add(getRowFromResultSet(resultSet));
}
[16/30] ambari git commit: AMBARI-19764. yarn.min.container.size is
read incorrectly on first load of the Hive config page (smohanty)
Posted by nc...@apache.org.
AMBARI-19764. yarn.min.container.size is read incorrectly on first load of the Hive config page (smohanty)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1eb4ca50
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1eb4ca50
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1eb4ca50
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1eb4ca50b5043ebd8326db90851520f3ea359731
Parents: 716b2fc
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Sun Jan 29 08:45:39 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Sun Jan 29 08:45:39 2017 -0800
----------------------------------------------------------------------
.../stacks/HDP/2.0.6/services/stack_advisor.py | 12 ++
.../src/main/resources/stacks/stack_advisor.py | 38 ++++-
.../stacks/2.0.6/common/test_stack_advisor.py | 148 +++++++++++++++++++
3 files changed, 191 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/1eb4ca50/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index af4539d..5c68f15 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -1457,6 +1457,18 @@ def getUserOperationContext(services, contextName):
return userContext[contextName]
return None
+# if serviceName is being added
+def isServiceBeingAdded(services, serviceName):
+ if services:
+ if 'user-context' in services.keys():
+ userContext = services["user-context"]
+ if DefaultStackAdvisor.OPERATION in userContext and \
+ 'AddService' == userContext[DefaultStackAdvisor.OPERATION] and \
+ DefaultStackAdvisor.OPERATION_DETAILS in userContext:
+ if -1 != userContext["operation_details"].find(serviceName):
+ return True
+ return False
+
# Validation helper methods
def getSiteProperties(configurations, siteName):
siteConfig = configurations.get(siteName)
http://git-wip-us.apache.org/repos/asf/ambari/blob/1eb4ca50/ambari-server/src/main/resources/stacks/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/stack_advisor.py b/ambari-server/src/main/resources/stacks/stack_advisor.py
index 9eb3973..d4b9ab7 100644
--- a/ambari-server/src/main/resources/stacks/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/stack_advisor.py
@@ -1024,15 +1024,25 @@ class DefaultStackAdvisor(StackAdvisor):
callContext = self.getCallContext(services)
operation = self.getUserOperationContext(services, DefaultStackAdvisor.OPERATION)
+ adding_yarn = self.isServiceBeingAdded(services, 'YARN')
if operation:
Logger.info("user operation context : " + str(operation))
if services: # its never None but some unit tests pass it as None
# If min container value is changed (user is changing it)
- # if its a validation call - just used what ever value is set
- # If its not a cluster create or add yarn service (TBD)
- if (self.getOldValue(services, "yarn-site", "yarn.scheduler.minimum-allocation-mb") or \
- 'recommendConfigurations' != callContext) and operation != DefaultStackAdvisor.CLUSTER_CREATE_OPERATION:
+ # if its a validation call - just use what ever value is set
+ # If its a recommend attribute call (when UI lands on a page)
+ # If add service but YARN is not being added
+ if self.getOldValue(services, "yarn-site", "yarn.scheduler.minimum-allocation-mb") or \
+ 'recommendConfigurations' != callContext or \
+ operation == DefaultStackAdvisor.RECOMMEND_ATTRIBUTE_OPERATION or \
+ (operation == DefaultStackAdvisor.ADD_SERVICE_OPERATION and not adding_yarn):
+
+ Logger.info("Full context: callContext = " + str(callContext) +
+ " and operation = " + str(operation) + " and adding YARN = " + str(adding_yarn) +
+ " and old value exists = " +
+ str(self.getOldValue(services, "yarn-site", "yarn.scheduler.minimum-allocation-mb")))
+
'''yarn.scheduler.minimum-allocation-mb has changed - then pick this value up'''
if "yarn-site" in services["configurations"] and \
"yarn.scheduler.minimum-allocation-mb" in services["configurations"]["yarn-site"]["properties"] and \
@@ -1087,11 +1097,25 @@ class DefaultStackAdvisor(StackAdvisor):
def getCallContext(self, services):
if services:
- if 'context' in services:
- Logger.info("context : " + str (services['context']))
- return services['context']['call_type']
+ if DefaultStackAdvisor.ADVISOR_CONTEXT in services:
+ Logger.info("call type context : " + str(services[DefaultStackAdvisor.ADVISOR_CONTEXT]))
+ return services[DefaultStackAdvisor.ADVISOR_CONTEXT][DefaultStackAdvisor.CALL_TYPE]
return ""
+
+ # if serviceName is being added
+ def isServiceBeingAdded(self, services, serviceName):
+ if services:
+ if 'user-context' in services.keys():
+ userContext = services["user-context"]
+ if DefaultStackAdvisor.OPERATION in userContext and \
+ 'AddService' == userContext[DefaultStackAdvisor.OPERATION] and \
+ DefaultStackAdvisor.OPERATION_DETAILS in userContext:
+ if -1 != userContext["operation_details"].find(serviceName):
+ return True
+ return False
+
+
def getUserOperationContext(self, services, contextName):
if services:
if 'user-context' in services.keys():
http://git-wip-us.apache.org/repos/asf/ambari/blob/1eb4ca50/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 388d7f8..60c48d6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -700,6 +700,154 @@ class TestHDP206StackAdvisor(TestCase):
result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, services)
self.assertEquals(result, expected_2048)
+ # Recommend attribute call - pick user specified value
+ services = {"services":
+ [{"StackServices":
+ {"service_name": "YARN",
+ "service_version": "2.6.0.2.2"
+ },
+ "components": [
+ {
+ "StackServiceComponents": {
+ "advertise_version": "true",
+ "cardinality": "1+",
+ "component_category": "SLAVE",
+ "component_name": "NODEMANAGER",
+ "custom_commands": [
+
+ ],
+ "display_name": "NodeManager",
+ "is_client": "false",
+ "is_master": "false",
+ "service_name": "YARN",
+ "stack_name": "HDP",
+ "stack_version": "2.2",
+ "hostnames": [
+ "host1"
+ ]
+ },
+ "dependencies": [
+ ]
+ }
+ ],
+ }],
+ "configurations": {
+ "yarn-site": {
+ "properties": {
+ "yarn.scheduler.minimum-allocation-mb": "2048",
+ "yarn.scheduler.maximum-allocation-mb": "12288"
+ }
+ }
+ },
+ "changed-configurations": [],
+ "user-context" : {
+ "operation" : "RecommendAttribute"
+ },
+ "advisor_context": {'call_type': 'recommendConfigurations'}
+ }
+
+ result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, services)
+ self.assertEquals(result, expected_2048)
+
+ # Add service and not adding YARN - pick user specified value
+ services = {"services":
+ [{"StackServices":
+ {"service_name": "YARN",
+ "service_version": "2.6.0.2.2"
+ },
+ "components": [
+ {
+ "StackServiceComponents": {
+ "advertise_version": "true",
+ "cardinality": "1+",
+ "component_category": "SLAVE",
+ "component_name": "NODEMANAGER",
+ "custom_commands": [
+
+ ],
+ "display_name": "NodeManager",
+ "is_client": "false",
+ "is_master": "false",
+ "service_name": "YARN",
+ "stack_name": "HDP",
+ "stack_version": "2.2",
+ "hostnames": [
+ "host1"
+ ]
+ },
+ "dependencies": [
+ ]
+ }
+ ],
+ }],
+ "configurations": {
+ "yarn-site": {
+ "properties": {
+ "yarn.scheduler.minimum-allocation-mb": "2048",
+ "yarn.scheduler.maximum-allocation-mb": "12288"
+ }
+ }
+ },
+ "changed-configurations": [],
+ "user-context" : {
+ "operation" : "AddService",
+ "operation_details" : "TEZ,HIVE,SLIDER"
+ },
+ "advisor_context": {'call_type': 'recommendConfigurations'}
+ }
+
+ result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, services)
+ self.assertEquals(result, expected_2048)
+
+ # Add service and adding YARN - compute the value
+ services = {"services":
+ [{"StackServices":
+ {"service_name": "YARN",
+ "service_version": "2.6.0.2.2"
+ },
+ "components": [
+ {
+ "StackServiceComponents": {
+ "advertise_version": "true",
+ "cardinality": "1+",
+ "component_category": "SLAVE",
+ "component_name": "NODEMANAGER",
+ "custom_commands": [
+
+ ],
+ "display_name": "NodeManager",
+ "is_client": "false",
+ "is_master": "false",
+ "service_name": "YARN",
+ "stack_name": "HDP",
+ "stack_version": "2.2",
+ "hostnames": [
+ "host1"
+ ]
+ },
+ "dependencies": [
+ ]
+ }
+ ],
+ }],
+ "configurations": {
+ "yarn-site": {
+ "properties": {
+ "yarn.scheduler.minimum-allocation-mb": "512"
+ }
+ }
+ },
+ "changed-configurations": [],
+ "user-context" : {
+ "operation" : "AddService",
+ "operation_details" : "TEZ,HIVE,YARN,SLIDER"
+ },
+ "advisor_context": {'call_type': 'recommendConfigurations'}
+ }
+
+ result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, services)
+ self.assertEquals(result, expected)
+
# Recommend config dependencies call - pick user specified value
services = {"services":
[{"StackServices":
[03/30] ambari git commit: AMBARI-19663. LLAP metrics collector host
gets wrongly updated. (prasanthj via Swapan Shridhar).
Posted by nc...@apache.org.
AMBARI-19663. LLAP metrics collector host gets wrongly updated. (prasanthj via Swapan Shridhar).
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/23522a28
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/23522a28
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/23522a28
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 23522a28126bbb1216bb1b556fce1d24a7262d26
Parents: 6d02a7a
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Fri Jan 27 11:16:03 2017 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Fri Jan 27 11:18:30 2017 -0800
----------------------------------------------------------------------
.../HIVE/0.12.0.2.0/package/scripts/params_linux.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/23522a28/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 997975e..4907f93 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -513,7 +513,7 @@ hive_site_config = dict(config['configurations']['hive-site'])
########################################################
############# AMS related params #####################
########################################################
-ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
+ams_collector_hosts = ",".join(default("/clusterHostInfo/metrics_collector_hosts", []))
has_metric_collector = not len(ams_collector_hosts) == 0
if has_metric_collector:
if 'cluster-env' in config['configurations'] and \
@@ -785,4 +785,4 @@ if enable_ranger_hive:
if has_ranger_admin and stack_supports_ranger_audit_db and xa_audit_db_flavor.lower() == 'sqla':
xa_audit_db_is_enabled = False
-# ranger hive plugin section end
\ No newline at end of file
+# ranger hive plugin section end
[30/30] ambari git commit: Merge branch 'trunk' into
branch-dev-patch-upgrade
Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/393ab3ee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/393ab3ee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/393ab3ee
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 393ab3ee28a27c63084ee4700bf0386b6ceb4a9b
Parents: 535327d d6d2479
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Jan 30 10:40:13 2017 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Jan 30 10:40:13 2017 -0500
----------------------------------------------------------------------
.../src/main/python/ambari_agent/ActionQueue.py | 28 ++--
.../libraries/functions/solr_cloud_util.py | 6 +-
.../ambari-logsearch-portal/pom.xml | 5 +
.../org/apache/ambari/logsearch/LogSearch.java | 98 +-----------
.../apache/ambari/logsearch/util/FileUtil.java | 26 ++++
.../apache/ambari/logsearch/util/SSLUtil.java | 141 ++++++++++++++----
.../logsearch/solr/AmbariSolrCloudCLI.java | 13 +-
.../logsearch/solr/AmbariSolrCloudClient.java | 6 +
.../solr/AmbariSolrCloudClientBuilder.java | 6 +
.../EnableKerberosPluginSolrZkCommand.java | 25 +++-
.../AmbariCustomCommandExecutionHelper.java | 16 ++
.../server/hooks/users/UserHookParams.java | 3 +-
.../server/hooks/users/UserHookService.java | 23 ++-
.../orm/entities/ServiceConfigEntity.java | 2 +-
.../AmbariAuthToLocalUserDetailsService.java | 17 ++-
.../users/CsvFilePersisterService.java | 6 +-
.../users/PostUserCreationHookServerAction.java | 8 +-
ambari-server/src/main/python/bootstrap.py | 3 +-
.../configuration/infra-solr-security-json.xml | 36 +++++
.../AMBARI_INFRA/0.1.0/metainfo.xml | 1 +
.../0.1.0/package/scripts/params.py | 2 +
.../0.1.0/package/scripts/setup_infra_solr.py | 12 +-
.../properties/infra-solr-security.json.j2 | 22 +++
.../2.1.0.2.0/package/scripts/hdfs_namenode.py | 4 +
.../HDFS/2.1.0.2.0/package/scripts/utils.py | 14 +-
.../2.1.0.2.0/package/scripts/zkfc_slave.py | 5 -
.../3.0.0.3.0/package/scripts/hdfs_namenode.py | 4 +
.../HDFS/3.0.0.3.0/package/scripts/utils.py | 42 +++---
.../3.0.0.3.0/package/scripts/zkfc_slave.py | 7 -
.../package/scripts/hive_interactive.py | 4 +-
.../package/scripts/hive_server_interactive.py | 6 +-
.../0.12.0.2.0/package/scripts/params_linux.py | 12 +-
.../scripts/post-user-creation-hook.sh | 9 +-
.../2.0.6/hooks/before-ANY/scripts/params.py | 16 +-
.../stacks/HDP/2.0.6/services/stack_advisor.py | 12 ++
.../stacks/HDP/2.3/upgrades/config-upgrade.xml | 20 +++
.../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml | 12 ++
.../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml | 12 ++
.../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml | 12 ++
.../stacks/HDP/2.3/upgrades/upgrade-2.4.xml | 11 ++
.../stacks/HDP/2.3/upgrades/upgrade-2.5.xml | 11 ++
.../stacks/HDP/2.3/upgrades/upgrade-2.6.xml | 11 ++
.../stacks/HDP/2.4/upgrades/config-upgrade.xml | 20 +++
.../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml | 14 ++
.../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml | 12 ++
.../stacks/HDP/2.4/upgrades/upgrade-2.5.xml | 11 ++
.../stacks/HDP/2.4/upgrades/upgrade-2.6.xml | 11 ++
.../services/HIVE/configuration/hive-log4j2.xml | 25 +++-
.../HIVE/configuration/llap-cli-log4j2.xml | 25 +++-
.../stacks/HDP/2.5/services/stack_advisor.py | 12 ++
.../stacks/HDP/2.5/upgrades/config-upgrade.xml | 28 ++++
.../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml | 14 +-
.../stacks/HDP/2.5/upgrades/upgrade-2.6.xml | 6 +
.../stacks/HDP/2.6/services/SPARK2/metainfo.xml | 5 +-
.../HDP/3.0/hooks/before-ANY/scripts/params.py | 19 ++-
.../FAKEHDFS/package/scripts/zkfc_slave.py | 5 +
.../PERF/1.0/services/FAKEYARN/metainfo.xml | 4 +-
.../src/main/resources/stacks/stack_advisor.py | 38 ++++-
.../server/hooks/users/UserHookServiceTest.java | 9 ++
.../server/orm/dao/ServiceConfigDAOTest.java | 66 +++++++--
.../UpdateKerberosConfigsServerActionTest.java | 14 +-
.../PostUserCreationHookServerActionTest.java | 3 +-
.../stacks/2.0.6/common/test_stack_advisor.py | 148 +++++++++++++++++++
.../stacks/2.4/AMBARI_INFRA/test_infra_solr.py | 7 +
.../test/python/stacks/2.4/configs/default.json | 3 +
.../stacks/2.5/HIVE/test_hive_server_int.py | 8 +-
.../2.5/common/services-normal-his-2-hosts.json | 8 +-
.../2.5/common/services-normal-his-valid.json | 8 +-
.../stacks/2.5/common/test_stack_advisor.py | 18 ++-
ambari-web/app/styles/modal_popups.less | 42 +++++-
.../views/common/modal_popups/log_tail_popup.js | 2 +-
.../admin/stack_upgrade/upgrade_history_view.js | 2 +-
.../src/main/resources/ui/app/models/queue.js | 3 +
.../view/hive2/actor/ResultSetIterator.java | 3 +-
.../view/hive20/actor/ResultSetIterator.java | 3 +-
.../hive20/src/main/resources/ui/app/app.js | 9 ++
.../components/multiple-database-search-bar.js | 31 +++-
.../resources/ui/app/controllers/queries.js | 22 +++
.../main/resources/ui/app/models/worksheet.js | 5 +-
.../hive20/src/main/resources/ui/app/router.js | 2 +
.../main/resources/ui/app/routes/application.js | 4 +-
.../ui/app/routes/databases/newtable.js | 30 ++++
.../src/main/resources/ui/app/routes/index.js | 26 ++++
.../src/main/resources/ui/app/routes/queries.js | 49 +-----
.../resources/ui/app/routes/queries/index.js | 9 +-
.../main/resources/ui/app/routes/queries/new.js | 38 +++++
.../resources/ui/app/routes/queries/query.js | 29 ++--
.../src/main/resources/ui/app/services/jobs.js | 10 +-
.../src/main/resources/ui/app/styles/app.scss | 4 +
.../components/multiple-database-search-bar.hbs | 11 +-
.../components/top-application-bar.hbs | 4 +-
.../ui/app/templates/queries/query.hbs | 4 +-
.../src/main/resources/ui/config/environment.js | 2 +
.../ui/app/components/bundle-config.js | 2 +-
.../ui/app/components/flow-designer.js | 5 +-
.../ui/app/components/workflow-action-editor.js | 2 +-
.../ui/app/domain/actionjob_hanlder.js | 1 +
.../app/domain/bundle/bundle-xml-generator.js | 4 +-
.../ui/app/domain/bundle/bundle-xml-importer.js | 6 +-
.../app/templates/components/flow-designer.hbs | 8 +-
.../ui/app/templates/components/sla-info.hbs | 6 +-
.../app/templates/components/sqoop-action.hbs | 2 +-
.../templates/components/workflow-actions.hbs | 2 +-
103 files changed, 1277 insertions(+), 375 deletions(-)
----------------------------------------------------------------------
[23/30] ambari git commit: AMBARI-19772. Job XML tag is not coming as
part of FS action node (Madhan Mohan Reddy via gauravn7)
Posted by nc...@apache.org.
AMBARI-19772. Job XML tag is not coming as part of FS action node (Madhan Mohan Reddy via gauravn7)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/75c126e4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/75c126e4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/75c126e4
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 75c126e49231e3ed33e8cf7d6cd30747f7ef6db5
Parents: 9f5a8bf
Author: Gaurav Nagar <gr...@gmail.com>
Authored: Mon Jan 30 14:49:28 2017 +0530
Committer: Gaurav Nagar <gr...@gmail.com>
Committed: Mon Jan 30 14:49:28 2017 +0530
----------------------------------------------------------------------
.../wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/75c126e4/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
----------------------------------------------------------------------
diff --git a/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js b/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
index c9d9887..2a82c24 100644
--- a/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
+++ b/contrib/views/wfmanager/src/main/resources/ui/app/domain/actionjob_hanlder.js
@@ -363,6 +363,7 @@ var FSActionJobHandler=ActionJobHandler.extend({
init(){
this.mapping=[
{xml:"name-node",domain:"nameNode"},
+ {xml:"job-xml",domain:"jobXml",occurs:"many",domainProperty:"value"},
{xml:"configuration", customHandler:this.configurationMapper},
{xml:"delete"},
{xml:"mkdir"},
[08/30] ambari git commit: AMBARI-19761. HSI start fails after Ambari
upgrade with Unrecognized option: --skiphadoopversion and --skiphbasecp
(Swapan Shridhar via smohanty)
Posted by nc...@apache.org.
AMBARI-19761. HSI start fails after Ambari upgrade with Unrecognized option: --skiphadoopversion and --skiphbasecp (Swapan Shridhar via smohanty)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/771c9c01
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/771c9c01
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/771c9c01
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 771c9c011b3ea00e4ad0171b04bd851c7ee74ee0
Parents: c600985
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Fri Jan 27 17:53:04 2017 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Fri Jan 27 17:53:04 2017 -0800
----------------------------------------------------------------------
.../HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/771c9c01/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index 924c88c..ce10824 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -284,9 +284,9 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
cmd = format("{stack_root}/current/hive-server2-hive2/bin/hive --service llap --instances {params.num_llap_nodes}"
" --slider-am-container-mb {params.slider_am_container_mb} --size {params.llap_daemon_container_size}m"
" --cache {params.hive_llap_io_mem_size}m --xmx {params.llap_heap_size}m --loglevel {params.llap_log_level}"
- " {params.llap_extra_slider_opts} --skiphadoopversion --skiphbasecp --output {LLAP_PACKAGE_CREATION_PATH}/{unique_name}")
+ " {params.llap_extra_slider_opts} --output {LLAP_PACKAGE_CREATION_PATH}/{unique_name}")
- # '--slider-placement' param is supported from HDP Hive GA version.
+ # Append params that are supported from Hive llap GA version.
if params.stack_supports_hive_interactive_ga:
# Figure out the Slider Anti-affinity to be used.
# YARN does not support anti-affinity, and therefore Slider implements AA by the means of exclusion lists, i.e, it
@@ -300,7 +300,7 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
else:
Logger.info("Setting slider_placement: 4, as llap_daemon_container_size : {0} <= 0.5 * "
"YARN NodeManager Memory({1})".format(params.llap_daemon_container_size, params.yarn_nm_mem))
- cmd += format(" --slider-placement {slider_placement}")
+ cmd += format(" --slider-placement {slider_placement} --skiphadoopversion --skiphbasecp")
if params.security_enabled:
llap_keytab_splits = params.hive_llap_keytab_file.split("/")