You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2016/09/28 17:27:58 UTC
[01/19] ambari git commit: AMBARI-18465. Log results from shell
commands run as user (aonishuk)
Repository: ambari
Updated Branches:
refs/heads/branch-dev-patch-upgrade 75b656c73 -> 2d60c5267
AMBARI-18465. Log results from shell commands run as user (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8192601d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8192601d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8192601d
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 8192601dfa606e72084813969b7de5af39bd083d
Parents: 5a29d48
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 26 18:18:34 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 26 18:18:34 2016 +0300
----------------------------------------------------------------------
.../libraries/functions/get_user_call_output.py | 13 +++++++++++--
1 file changed, 11 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/8192601d/ambari-common/src/main/python/resource_management/libraries/functions/get_user_call_output.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_user_call_output.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_user_call_output.py
index 4b11614..e0723c6 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/get_user_call_output.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/get_user_call_output.py
@@ -21,6 +21,7 @@ Ambari Agent
"""
import os
+import sys
import tempfile
from resource_management.core import shell
from resource_management.core.logger import Logger
@@ -60,9 +61,17 @@ def get_user_call_output(command, user, quiet=False, is_checked_call=True, **cal
if is_checked_call:
raise Fail(err_msg)
else:
- Logger.warning(err_msg)
+ Logger.warning(err_msg)
+
+ result = code, files_output[0], files_output[1]
- return code, files_output[0], files_output[1]
+ caller_filename = sys._getframe(1).f_code.co_filename
+ is_internal_call = shell.NOT_LOGGED_FOLDER in caller_filename
+ if quiet == False or (quiet == None and not is_internal_call):
+ log_msg = "{0} returned {1}".format(get_user_call_output.__name__, result)
+ Logger.info(log_msg)
+
+ return result
finally:
for f in out_files:
f.close()
[13/19] ambari git commit: AMBARI-18455. Ambari dashboard HDFS links
widget incorrectly shows 2 standby namenode (akovalenko)
Posted by nc...@apache.org.
AMBARI-18455. Ambari dashboard HDFS links widget incorrectly shows 2 standby namenode (akovalenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a0fff847
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a0fff847
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a0fff847
Branch: refs/heads/branch-dev-patch-upgrade
Commit: a0fff8472a3e60972a0ae57d30c09e867d86ad0f
Parents: 6fb1cee
Author: Aleksandr Kovalenko <ak...@hortonworks.com>
Authored: Wed Sep 28 13:25:02 2016 +0300
Committer: Aleksandr Kovalenko <ak...@hortonworks.com>
Committed: Wed Sep 28 13:25:02 2016 +0300
----------------------------------------------------------------------
ambari-web/app/views/main/dashboard/widgets/hdfs_links.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a0fff847/ambari-web/app/views/main/dashboard/widgets/hdfs_links.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/dashboard/widgets/hdfs_links.js b/ambari-web/app/views/main/dashboard/widgets/hdfs_links.js
index 128b997..cfe5eb1 100644
--- a/ambari-web/app/views/main/dashboard/widgets/hdfs_links.js
+++ b/ambari-web/app/views/main/dashboard/widgets/hdfs_links.js
@@ -43,7 +43,7 @@ App.HDFSLinksView = App.LinkDashboardWidgetView.extend({
isStandbyNNValid: Em.computed.bool('model.standbyNameNode'),
- isTwoStandbyNN: Em.computed.and('isActiveNNValid', 'isStandbyNNValid'),
+ isTwoStandbyNN: Em.computed.and('model.standbyNameNode', 'model.standbyNameNode2'),
twoStandbyComponent: function () {
return App.HostComponent.find().findProperty('componentName', 'NAMENODE');
[12/19] ambari git commit: AMBARI-18469. Fix custom jdbc
functionality for ambari-server setup.(vbrodetskyi)
Posted by nc...@apache.org.
AMBARI-18469. Fix custom jdbc functionality for ambari-server setup.(vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/05e65e29
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/05e65e29
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/05e65e29
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 05e65e297a09e1498a8ee75fd55d059c00f99976
Parents: 20ce57b
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Wed Sep 28 09:26:32 2016 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Wed Sep 28 09:26:32 2016 +0300
----------------------------------------------------------------------
.../python/ambari_server/dbConfiguration.py | 52 +++++++--
.../src/test/python/TestAmbariServer.py | 107 +++++++------------
2 files changed, 79 insertions(+), 80 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/05e65e29/ambari-server/src/main/python/ambari_server/dbConfiguration.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/dbConfiguration.py b/ambari-server/src/main/python/ambari_server/dbConfiguration.py
index ede257f..1d54dce 100644
--- a/ambari-server/src/main/python/ambari_server/dbConfiguration.py
+++ b/ambari-server/src/main/python/ambari_server/dbConfiguration.py
@@ -19,17 +19,19 @@ limitations under the License.
'''
import glob
import os
+import shutil
from ambari_commons import OSConst, OSCheck
from ambari_commons.exceptions import FatalException
from ambari_commons.logging_utils import get_silent, print_error_msg, print_info_msg, print_warning_msg, set_silent
from ambari_commons.os_family_impl import OsFamilyImpl
from ambari_commons.str_utils import cbool
+from ambari_server.serverClassPath import JDBC_DRIVER_PATH_PROPERTY
from ambari_server.serverConfiguration import decrypt_password_for_alias, get_ambari_properties, get_is_secure, \
get_resources_location, get_value_from_properties, is_alias_string, \
JDBC_PASSWORD_PROPERTY, JDBC_RCA_PASSWORD_ALIAS, PRESS_ENTER_MSG, DEFAULT_DBMS_PROPERTY, JDBC_DATABASE_PROPERTY, \
- PERSISTENCE_TYPE_PROPERTY
-from ambari_server.userInput import get_validated_string_input
+ PERSISTENCE_TYPE_PROPERTY, update_properties, configDefaults
+from ambari_server.userInput import get_YN_input, get_validated_string_input
#Database settings
@@ -160,14 +162,46 @@ class DBMSConfig(object):
pass
def ensure_jdbc_driver_installed(self, properties):
- (result, msg) = self._prompt_jdbc_driver_install(properties)
- if result == -1:
- print_error_msg(msg)
- raise FatalException(-1, msg)
+ server_jdbc_path = properties.get_property(JDBC_DRIVER_PATH_PROPERTY)
+ if server_jdbc_path and os.path.isfile(server_jdbc_path):
+ return True
+
+ default_driver_path = self._get_default_driver_path(properties)
+ if default_driver_path and os.path.isfile(default_driver_path):
+ ambari_should_use_existing_default_jdbc = get_YN_input("Should ambari use existing default jdbc {0} [y/n] (y)? ".format(default_driver_path), True)
+ if ambari_should_use_existing_default_jdbc:
+ properties.process_pair(JDBC_DRIVER_PATH_PROPERTY, default_driver_path)
+ update_properties(properties)
+ return True
+
+ path_to_custom_jdbc_driver = get_validated_string_input("Enter full path to custom jdbc driver: ", None, None, None, False, False)
+ if path_to_custom_jdbc_driver and os.path.isfile(path_to_custom_jdbc_driver):
+ try:
+ custom_jdbc_name = os.path.basename(path_to_custom_jdbc_driver)
+ if not path_to_custom_jdbc_driver == os.path.join(configDefaults.JAVA_SHARE_PATH, custom_jdbc_name):
+ if os.path.isfile(os.path.join(configDefaults.JAVA_SHARE_PATH, custom_jdbc_name)):
+ replace_jdbc_in_share_dir = get_YN_input("You already have file {0} in /usr/share/java/. Should it be replaced? [y/n] (y)? ".format(custom_jdbc_name), True)
+ if replace_jdbc_in_share_dir:
+ try:
+ os.remove(os.path.join(configDefaults.JAVA_SHARE_PATH, custom_jdbc_name))
+ except Exception, ee:
+ err = 'ERROR: Could not remove jdbc file. %s' % os.path.join(configDefaults.JAVA_SHARE_PATH, custom_jdbc_name)
+ raise FatalException(1, err)
+ shutil.copy(path_to_custom_jdbc_driver, configDefaults.JAVA_SHARE_PATH)
+ print "Copying {0} to {1}".format(path_to_custom_jdbc_driver, configDefaults.JAVA_SHARE_PATH)
+ except Exception, e:
+ err = "Can not copy file {0} to {1} due to: {2} . Please check file " \
+ "permissions and free disk space.".format(path_to_custom_jdbc_driver, configDefaults.JAVA_SHARE_PATH, str(e))
+ raise FatalException(1, err)
+
+ properties.process_pair(JDBC_DRIVER_PATH_PROPERTY, path_to_custom_jdbc_driver)
+ update_properties(properties)
+ return True
+ else:
+ print_error_msg("Custom jdbc connector path is unavailable. Please put correct path to jdbc connector.")
+
+ return False
- if result != 1:
- result = self._install_jdbc_driver(properties, result)
- return cbool(result)
def change_db_files_owner(self):
if self._is_local_database():
http://git-wip-us.apache.org/repos/asf/ambari/blob/05e65e29/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 5746503..ed200b8 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -125,7 +125,7 @@ with patch.object(platform, "linux_distribution", return_value = MagicMock(retur
from ambari_server.userInput import get_YN_input, get_choice_string_input, get_validated_string_input, \
read_password
from ambari_server_main import get_ulimit_open_files, ULIMIT_OPEN_FILES_KEY, ULIMIT_OPEN_FILES_DEFAULT
- from ambari_server.serverClassPath import ServerClassPath
+ from ambari_server.serverClassPath import JDBC_DRIVER_PATH_PROPERTY, ServerClassPath
from ambari_server.hostUpdate import update_host_names
from ambari_server.checkDatabase import check_database
from ambari_server import serverConfiguration
@@ -6459,7 +6459,12 @@ class TestAmbariServer(TestCase):
@patch("os.remove")
@patch("os.symlink")
@patch("shutil.copy")
- def test_ensure_jdbc_drivers_installed(self, shutil_copy_mock, os_symlink_mock, os_remove_mock, lexists_mock, isdir_mock, glob_mock,
+ @patch("os.path.isfile")
+ @patch("ambari_server.dbConfiguration.get_YN_input")
+ @patch("ambari_server.dbConfiguration.update_properties")
+ @patch("ambari_server.dbConfiguration.get_validated_string_input")
+ def test_ensure_jdbc_drivers_installed(self, get_valid_str_in_mock, update_properties_mock, getYN_mock, isfile_mock, shutil_copy_mock,
+ os_symlink_mock, os_remove_mock, lexists_mock, isdir_mock, glob_mock,
raw_input_mock, print_warning_msg, print_error_msg_mock, print_error_msg_2_mock,
get_ambari_properties_mock, get_ambari_properties_2_mock):
out = StringIO.StringIO()
@@ -6472,7 +6477,11 @@ class TestAmbariServer(TestCase):
print_error_msg_mock.reset_mock()
print_warning_msg.reset_mock()
raw_input_mock.reset_mock()
-
+ isfile_mock.reset_mock()
+ os_remove_mock.reset_mock
+ getYN_mock.reset_mock()
+ get_valid_str_in_mock.reset_mock()
+ update_properties_mock.reset_mock()
args = MagicMock()
del args.database_index
@@ -6485,12 +6494,13 @@ class TestAmbariServer(TestCase):
return args
- # Check positive scenario
+ # Check scenario when default jdbc was found and used
drivers_list = [os.path.join(os.sep,'usr','share','java','ojdbc6.jar')]
resources_dir = os.sep + 'tmp'
props = Properties()
props.process_pair(RESOURCES_DIR_PROPERTY, resources_dir)
+ props.process_pair(JDBC_DRIVER_PATH_PROPERTY, "/some/test/path/to/oracle.-jdbc.jar")
get_ambari_properties_2_mock.return_value = get_ambari_properties_mock.return_value = props
factory = DBMSConfigFactory()
@@ -6500,94 +6510,49 @@ class TestAmbariServer(TestCase):
isdir_mock.return_value = True
lexists_mock.return_value = True
+ isfile_mock.return_value = True
+ getYN_mock.return_value = True
dbms = factory.create(args, props)
rcode = dbms.ensure_jdbc_driver_installed(props)
- self.assertEquals(os_symlink_mock.call_count, 1)
- self.assertEquals(os_symlink_mock.call_args_list[0][0][0], os.path.join(os.sep,'tmp','ojdbc6.jar'))
- self.assertEquals(os_symlink_mock.call_args_list[0][0][1], os.path.join(os.sep,'tmp','oracle-jdbc-driver.jar'))
+ self.assertEquals(update_properties_mock.call_count, 0)
self.assertTrue(rcode)
- self.assertEquals(shutil_copy_mock.call_count, 1)
- self.assertEquals(shutil_copy_mock.call_args_list[0][0][0], drivers_list[0])
- self.assertEquals(shutil_copy_mock.call_args_list[0][0][1], resources_dir)
-
- # Check negative scenarios
- # Silent option, no drivers
- set_silent(True)
args = reset_mocks()
- glob_mock.return_value = []
-
- failed = False
-
- try:
- dbms = factory.create(args, props)
- rcode = dbms.ensure_jdbc_driver_installed(props)
- except FatalException:
- failed = True
-
- self.assertTrue(print_error_msg_mock.called)
- self.assertTrue(failed)
-
- # Non-Silent option, no drivers
- set_silent(False)
-
- args = reset_mocks()
- glob_mock.return_value = []
-
- failed = False
-
- try:
- dbms = factory.create(args, props)
- rcode = dbms.ensure_jdbc_driver_installed(props)
- except FatalException:
- failed = True
-
- self.assertTrue(failed)
- self.assertTrue(print_error_msg_mock.called)
-
- # Non-Silent option, no drivers at first ask, present drivers after that
- args = reset_mocks()
-
- glob_mock.side_effect = [[], drivers_list, drivers_list]
+ isfile_mock.side_effect = [False, True]
+ getYN_mock.return_value = True
dbms = factory.create(args, props)
rcode = dbms.ensure_jdbc_driver_installed(props)
+ self.assertEquals(update_properties_mock.call_count, 1)
+ self.assertEquals(getYN_mock.call_count, 1)
self.assertTrue(rcode)
- self.assertEquals(shutil_copy_mock.call_count, 1)
- self.assertEquals(shutil_copy_mock.call_args_list[0][0][0], drivers_list[0])
- self.assertEquals(shutil_copy_mock.call_args_list[0][0][1], resources_dir)
- # Non-Silent option, no drivers at first ask, no drivers after that
+ # check scenario when user entered valid jdbc full path
args = reset_mocks()
- glob_mock.side_effect = [[], []]
+ isfile_mock.side_effect = [False, False, True, True]
+ get_valid_str_in_mock.return_value = '/test/full/path/to/oracle_jdbc.jar'
- failed = False
-
- try:
- dbms = factory.create(args, props)
- rcode = dbms.ensure_jdbc_driver_installed(props)
- except FatalException:
- failed = True
+ rcode = dbms.ensure_jdbc_driver_installed(props)
- self.assertTrue(failed)
- self.assertTrue(print_error_msg_mock.called)
+ self.assertEquals(update_properties_mock.call_count, 1)
+ self.assertTrue(rcode)
+ self.assertEquals(props['server.jdbc.driver.path'], '/test/full/path/to/oracle_jdbc.jar')
+ self.assertEquals(shutil_copy_mock.call_count, 1)
+ self.assertEquals(shutil_copy_mock.call_count, 1)
+ self.assertEquals(os_remove_mock.call_count, 1)
- # Failed to copy_files
+ # check scenario when no default jdbc and user entered incorrect full jdbc path
args = reset_mocks()
- glob_mock.side_effect = [[], drivers_list, drivers_list]
+ isfile_mock.side_effect = [False, False, False, False]
- try:
- dbms = factory.create(args, props)
- rcode = dbms.ensure_jdbc_driver_installed(props)
- except FatalException:
- failed = True
+ rcode = dbms.ensure_jdbc_driver_installed(props)
- self.assertTrue(failed)
+ self.assertFalse(rcode)
+ print_error_msg_mock.assert_called_once_with("Custom jdbc connector path is unavailable. Please put correct path to jdbc connector.")
- sys.stdout = sys.__stdout__
pass
[08/19] ambari git commit: AMBARI-18459: Print error messages if
bulkcommand section of a component includes non-existing component (dili)
Posted by nc...@apache.org.
AMBARI-18459: Print error messages if bulkcommand section of a component includes non-existing component (dili)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/53b4bd41
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/53b4bd41
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/53b4bd41
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 53b4bd41518a5292d3c6c9cdafecee3898d046f3
Parents: 87423d6
Author: Di Li <di...@apache.org>
Authored: Tue Sep 27 10:58:59 2016 -0400
Committer: Di Li <di...@apache.org>
Committed: Tue Sep 27 10:58:59 2016 -0400
----------------------------------------------------------------------
.../ambari/server/stack/ServiceModule.java | 4 +--
.../apache/ambari/server/stack/StackModule.java | 37 ++++++++++++++++++--
2 files changed, 37 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/53b4bd41/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
index 650bdf1..34e65c3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
@@ -180,7 +180,7 @@ public class ServiceModule extends BaseModule<ServiceModule, ServiceInfo> implem
return;
}
- LOG.info("Resolve service");
+ LOG.debug("Resolve service");
// If resolving against parent stack service module (stack inheritance), do not merge if an
// explicit parent is specified
@@ -193,7 +193,7 @@ public class ServiceModule extends BaseModule<ServiceModule, ServiceInfo> implem
if (serviceInfo.getComment() == null) {
serviceInfo.setComment(parent.getComment());
}
- LOG.info("Display name service/parent: " + serviceInfo.getDisplayName() + "/" + parent.getDisplayName());
+ LOG.info(String.format("Display name service/parent: %s/%s", serviceInfo.getDisplayName(), parent.getDisplayName()));
if (serviceInfo.getDisplayName() == null) {
serviceInfo.setDisplayName(parent.getDisplayName());
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53b4bd41/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
index 93eeb7e..d9eaf27 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
@@ -31,6 +31,8 @@ import java.util.Set;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.state.BulkCommandDefinition;
+import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.ExtensionInfo;
import org.apache.ambari.server.state.PropertyDependencyInfo;
@@ -182,7 +184,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
StackModule parentModule, Map<String, StackModule> allStacks, Map<String, ServiceModule> commonServices, Map<String, ExtensionModule> extensions)
throws AmbariException {
moduleState = ModuleState.VISITED;
- LOG.info("Resolve: " + stackInfo.getName() + ":" + stackInfo.getVersion());
+ LOG.info(String.format("Resolve: %s:%s", stackInfo.getName(), stackInfo.getVersion()));
String parentVersion = stackInfo.getParentStackVersion();
mergeServicesWithExplicitParent(allStacks, commonServices, extensions);
addExtensionServices();
@@ -204,6 +206,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
processUpgradePacks();
processRepositories();
processPropertyDependencies();
+ validateBulkCommandComponents(allStacks);
moduleState = ModuleState.RESOLVED;
}
@@ -351,7 +354,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
Map<String, ServiceModule> commonServices, Map<String, ExtensionModule> extensions)
throws AmbariException {
- LOG.info("mergeServiceWithExplicitParent" + parent);
+ LOG.info(String.format("Merge service %s with explicit parent: %s", service.getModuleInfo().getName(), parent));
if(isCommonServiceParent(parent)) {
mergeServiceWithCommonServiceParent(service, parent, allStacks, commonServices, extensions);
} else if(isExtensionServiceParent(parent)) {
@@ -1198,6 +1201,36 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
}
}
+ /**
+ * Validate the component defined in the bulkCommand section is defined for the service
+ * This needs to happen after the stack is resolved
+ * */
+ private void validateBulkCommandComponents(Map<String, StackModule> allStacks){
+ if (null != stackInfo) {
+ String currentStackId = stackInfo.getName() + StackManager.PATH_DELIMITER + stackInfo.getVersion();
+ LOG.debug("Validate bulk command components for: " + currentStackId);
+ StackModule currentStack = allStacks.get(currentStackId);
+ if (null != currentStack){
+ for (ServiceModule serviceModule : currentStack.getServiceModules().values()) {
+ ServiceInfo service = serviceModule.getModuleInfo();
+ for(ComponentInfo component: service.getComponents()){
+ BulkCommandDefinition bcd = component.getBulkCommandDefinition();
+ if (null != bcd && null != bcd.getMasterComponent()){
+ String name = bcd.getMasterComponent();
+ ComponentInfo targetComponent = service.getComponentByName(name);
+ if (null == targetComponent){
+ String serviceName = service.getName();
+ LOG.error(
+ String.format("%s bulk command section for service %s in stack %s references a component %s which doesn't exist.",
+ component.getName(), serviceName, currentStackId, name));
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
@Override
public boolean isValid() {
return valid;
[07/19] ambari git commit: AMBARI-18051 - Services should be able to
provide their own pre-req checks by supplying a jar file
Posted by nc...@apache.org.
AMBARI-18051 - Services should be able to provide their own pre-req checks by supplying a jar file
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/87423d64
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/87423d64
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/87423d64
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 87423d64f54d896c62d1a9245eb03a97763e35a4
Parents: 6d35228
Author: Tim Thorpe <tt...@apache.org>
Authored: Tue Sep 27 06:05:31 2016 -0700
Committer: Tim Thorpe <tt...@apache.org>
Committed: Tue Sep 27 06:05:31 2016 -0700
----------------------------------------------------------------------
ambari-server/pom.xml | 34 +++
.../ambari/server/checks/CheckDescription.java | 5 +-
.../server/checks/UpgradeCheckRegistry.java | 76 ++++++
.../PreUpgradeCheckResourceProvider.java | 27 +-
.../server/stack/CommonServiceDirectory.java | 50 ++--
.../ambari/server/stack/ServiceDirectory.java | 112 +++++++-
.../ambari/server/stack/ServiceModule.java | 4 +
.../server/stack/StackServiceDirectory.java | 81 +++---
.../apache/ambari/server/state/ServiceInfo.java | 14 +
.../PreUpgradeCheckResourceProviderTest.java | 255 +++++++++++++++++++
.../sample/checks/SampleServiceCheck.java | 35 +++
.../ambari/server/stack/ServiceModuleTest.java | 30 +++
.../server/stack/StackManagerExtensionTest.java | 7 +
13 files changed, 636 insertions(+), 94 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 5731c9d..354b6cb 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -576,6 +576,40 @@
</configuration>
</plugin>
<plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>3.0.2</version>
+ <executions>
+ <execution>
+ <id>create-sample-upgrade-check-jar</id>
+ <phase>process-test-classes</phase>
+ <goals>
+ <goal>test-jar</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>target/test-classes/checks</outputDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>generate-test-oozie2-checks-dir</id>
+ <phase>process-test-classes</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <tasks>
+ <mkdir dir="target/test-classes/extensions/EXT/0.1/services/OOZIE2/checks/tmp"/>
+ </tasks>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.2.1</version>
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
index 498481d..7fdd0ce 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
@@ -309,9 +309,8 @@ public class CheckDescription {
private PrereqCheckType m_type;
private String m_description;
private Map<String, String> m_fails;
- public CheckDescription(String name, PrereqCheckType type, String description,
- Map<String, String> fails) {
- m_name = name;
+ public CheckDescription(String name, PrereqCheckType type, String description, Map<String, String> fails) {
+ m_name = name;
m_type = type;
m_description = description;
m_fails = fails;
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java
index 4ed345c..cecf6c5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/UpgradeCheckRegistry.java
@@ -17,14 +17,24 @@
*/
package org.apache.ambari.server.checks;
+import java.io.File;
+import java.io.FilenameFilter;
+import java.net.URL;
+import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import com.google.inject.Singleton;
+
+import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.stack.UpgradePack;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.util.ClassUtils;
/**
* The {@link UpgradeCheckRegistry} contains the ordered list of all pre-upgrade
@@ -33,6 +43,7 @@ import org.apache.ambari.server.state.stack.UpgradePack;
*/
@Singleton
public class UpgradeCheckRegistry {
+ private static Logger LOG = LoggerFactory.getLogger(UpgradeCheckRegistry.class);
/**
* The list of upgrade checks to run through.
@@ -59,6 +70,71 @@ public class UpgradeCheckRegistry {
return new ArrayList<AbstractCheckDescriptor>(m_upgradeChecks);
}
+ public List<AbstractCheckDescriptor> getServiceLevelUpgradeChecks(UpgradePack upgradePack, Map<String, ServiceInfo> services) {
+ List<String> prerequisiteChecks = upgradePack.getPrerequisiteChecks();
+ List<String> missingChecks = new ArrayList<String>();
+ for (String prerequisiteCheck : prerequisiteChecks) {
+ if (!isRegistered(prerequisiteCheck)) {
+ missingChecks.add(prerequisiteCheck);
+ }
+ }
+
+ List<AbstractCheckDescriptor> checks = new ArrayList<>(missingChecks.size());
+ if (missingChecks.isEmpty()) {
+ return checks;
+ }
+
+ List<URL> urls = new ArrayList<URL>();
+ for (ServiceInfo service : services.values()) {
+ File dir = service.getChecksFolder();
+ File[] jars = dir.listFiles(new FilenameFilter() {
+ @Override
+ public boolean accept(File dir, String name) {
+ return name.endsWith(".jar");
+ }
+ });
+ for (File jar : jars) {
+ try {
+ URL url = jar.toURI().toURL();
+ urls.add(url);
+ LOG.debug("Adding service check jar to classpath: {}", url.toString());
+ }
+ catch (Exception e) {
+ LOG.error("Failed to add service check jar to classpath: {}", jar.getAbsolutePath(), e);
+ }
+ }
+ }
+
+ ClassLoader classLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), ClassUtils.getDefaultClassLoader());
+ for (String prerequisiteCheck : missingChecks) {
+ Class<?> clazz = null;
+ try {
+ clazz = ClassUtils.resolveClassName(prerequisiteCheck, classLoader);
+ }
+ catch (IllegalArgumentException illegalArgumentException) {
+ LOG.error("Unable to find upgrade check {}", prerequisiteCheck, illegalArgumentException);
+ }
+ try {
+ if (clazz != null) {
+ AbstractCheckDescriptor upgradeCheck = (AbstractCheckDescriptor) clazz.newInstance();
+ checks.add(upgradeCheck);
+ }
+ } catch (Exception exception) {
+ LOG.error("Unable to create upgrade check {}", prerequisiteCheck, exception);
+ }
+ }
+ return checks;
+ }
+
+ private boolean isRegistered(String prerequisiteCheck) {
+ for (AbstractCheckDescriptor descriptor: m_upgradeChecks){
+ if (prerequisiteCheck.equals(descriptor.getClass().getName())){
+ return true;
+ }
+ }
+ return false;
+ }
+
/**
* Gets an ordered and filtered list of the upgrade checks.
* @param upgradePack Upgrade pack object with the list of required checks to be included
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
index 7d7b618..7e54f83 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProvider.java
@@ -44,21 +44,26 @@ import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.state.CheckHelper;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.UpgradeHelper;
import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+import org.apache.ambari.server.state.stack.UpgradePack;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.google.inject.Provider;
-import org.apache.ambari.server.state.stack.UpgradePack;
-import org.apache.ambari.server.state.stack.upgrade.Direction;
-import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
/**
* Resource provider for pre-upgrade checks.
*/
@StaticallyInject
public class PreUpgradeCheckResourceProvider extends ReadOnlyResourceProvider {
+ private static final Logger LOG = LoggerFactory.getLogger(PreUpgradeCheckResourceProvider.class);
//----- Property ID constants ---------------------------------------------
@@ -89,6 +94,9 @@ public class PreUpgradeCheckResourceProvider extends ReadOnlyResourceProvider {
@Inject
private static Provider<UpgradeHelper> upgradeHelper;
+ @Inject
+ private static CheckHelper checkHelper;
+
private static Set<String> pkPropertyIds = Collections.singleton(UPGRADE_CHECK_ID_PROPERTY_ID);
public static Set<String> propertyIds = Sets.newHashSet(
@@ -113,9 +121,6 @@ public class PreUpgradeCheckResourceProvider extends ReadOnlyResourceProvider {
}
};
- @Inject
- private static CheckHelper checkHelper;
-
/**
* Constructor.
*
@@ -185,10 +190,18 @@ public class PreUpgradeCheckResourceProvider extends ReadOnlyResourceProvider {
}
// ToDo: properly handle exceptions, i.e. create fake check with error description
-
List<AbstractCheckDescriptor> upgradeChecksToRun = upgradeCheckRegistry.getFilteredUpgradeChecks(upgradePack);
upgradeCheckRequest.setPrerequisiteCheckConfig(upgradePack.getPrerequisiteCheckConfig());
+ try {
+ // Register all the custom prechecks from the services
+ Map<String, ServiceInfo> services = getManagementController().getAmbariMetaInfo().getServices(stackName, upgradePack.getTarget());
+ List<AbstractCheckDescriptor> serviceLevelUpgradeChecksToRun = upgradeCheckRegistry.getServiceLevelUpgradeChecks(upgradePack, services);
+ upgradeChecksToRun.addAll(serviceLevelUpgradeChecksToRun);
+ } catch (AmbariException ambariException) {
+ LOG.error("Unable to register all the custom prechecks from the services", ambariException);
+ }
+
for (PrerequisiteCheck prerequisiteCheck : checkHelper.performChecks(upgradeCheckRequest, upgradeChecksToRun)) {
final Resource resource = new ResourceImpl(Resource.Type.PreUpgradeCheck);
setResourceProperty(resource, UPGRADE_CHECK_ID_PROPERTY_ID, prerequisiteCheck.getId(), requestedIds);
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/main/java/org/apache/ambari/server/stack/CommonServiceDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/CommonServiceDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/CommonServiceDirectory.java
index cdedbb4..40e7105 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/CommonServiceDirectory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/CommonServiceDirectory.java
@@ -19,8 +19,6 @@
package org.apache.ambari.server.stack;
import org.apache.ambari.server.AmbariException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import java.io.File;
@@ -28,10 +26,6 @@ import java.io.File;
* Encapsulates IO operations on a common services directory.
*/
public class CommonServiceDirectory extends ServiceDirectory {
- /**
- * logger instance
- */
- private static final Logger LOG = LoggerFactory.getLogger(CommonServiceDirectory.class);
/**
* Constructor.
@@ -62,36 +56,30 @@ public class CommonServiceDirectory extends ServiceDirectory {
@Override
/**
- * Calculate the common service directories
- * packageDir Format: common-services/<serviceName>/<serviceVersion>/package
- * Example:
- * directory: "/var/lib/ambari-server/resources/common-services/HDFS/1.0"
- * packageDir: "common-services/HDFS/1.0/package"
+ * @return the service name-version (will be used for logging purposes by superclass)
*/
- protected void calculateDirectories() {
+ public String getService() {
File serviceVersionDir = new File(getAbsolutePath());
File serviceDir = serviceVersionDir.getParentFile();
- String serviceId = String.format("%s/%s", serviceDir.getName(), serviceVersionDir.getName());
+ String service = String.format("%s-%s", serviceDir.getName(), serviceVersionDir.getName());
+ return service;
+ }
- File absPackageDir = new File(getAbsolutePath() + File.separator + PACKAGE_FOLDER_NAME);
- if(absPackageDir.isDirectory()) {
- packageDir = absPackageDir.getPath().substring(serviceDir.getParentFile().getParentFile().getPath().length() + 1);
- LOG.debug(String.format("Service package folder for common service %s has been resolved to %s",
- serviceId, packageDir));
- } else {
- LOG.debug(String.format("Service package folder %s for common service %s does not exist.",
- absPackageDir, serviceId ));
- }
+ @Override
+ /**
+ * @return the resources directory
+ */
+ protected File getResourcesDirectory() {
+ File serviceVersionDir = new File(getAbsolutePath());
+ return serviceVersionDir.getParentFile().getParentFile().getParentFile();
+ }
- File absUpgradesDir = new File(getAbsolutePath() + File.separator + UPGRADES_FOLDER_NAME);
- if(absUpgradesDir.isDirectory()) {
- upgradesDir = absUpgradesDir;
- LOG.debug(String.format("Service upgrades folder for common service %s has been resolved to %s",
- serviceId, upgradesDir));
- } else {
- LOG.debug(String.format("Service upgrades folder %s for common service %s does not exist.",
- absUpgradesDir, serviceId ));
- }
+ @Override
+ /**
+ * @return the text common-services (will be used for logging purposes by superclass)
+ */
+ public String getStack() {
+ return "common-services";
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
index 30663a3..00dc046 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceDirectory.java
@@ -88,6 +88,11 @@ public abstract class ServiceDirectory extends StackDefinitionDirectory {
protected File upgradesDir;
/**
+ * checks directory path
+ */
+ protected File checksDir;
+
+ /**
* service metainfo file object representation
*/
private ServiceMetainfoXml metaInfoXml;
@@ -108,6 +113,11 @@ public abstract class ServiceDirectory extends StackDefinitionDirectory {
protected static final String UPGRADES_FOLDER_NAME = "upgrades";
/**
+ * checks directory name
+ */
+ protected static final String CHECKS_FOLDER_NAME = "checks";
+
+ /**
* service metainfo file name
*/
private static final String SERVICE_METAINFO_FILE_NAME = "metainfo.xml";
@@ -153,6 +163,15 @@ public abstract class ServiceDirectory extends StackDefinitionDirectory {
}
/**
+ * Obtain the checks directory path.
+ *
+ * @return checks directory path
+ */
+ public File getChecksDir() {
+ return checksDir;
+ }
+
+ /**
* Obtain the metrics file.
*
* @return metrics file
@@ -235,7 +254,7 @@ public abstract class ServiceDirectory extends StackDefinitionDirectory {
* Parse the service directory.
*/
protected void parsePath() throws AmbariException {
- calculateDirectories();
+ calculateDirectories(getStack(), getService());
parseMetaInfoFile();
File af = new File(directory, AmbariMetaInfo.SERVICE_ALERT_FILE_NAME);
@@ -265,12 +284,101 @@ public abstract class ServiceDirectory extends StackDefinitionDirectory {
File themeFile = new File(directory, AmbariMetaInfo.SERVICE_THEME_FILE_NAME);
this.themeFile = themeFile.exists() ? themeFile : null;
+
+ // FIXME(review): this block is a copy-paste of the theme-file lookup above --
+ // "checksFile" was built from SERVICE_THEME_FILE_NAME and never used, and
+ // themeFile was re-assigned a second time. The checks directory is already
+ // resolved via calculateChecksDirectory(), so no per-file lookup is needed here.
+ File checksFile = new File(directory, AmbariMetaInfo.SERVICE_THEME_FILE_NAME);
+ this.themeFile = themeFile.exists() ? themeFile : null;
}
/**
+ * @return the service identifier required. ex: service name for stack services or the service/version for common services
+ */
+ public abstract String getService();
+
+ /**
+ * @return the stack name/version or common-services
+ */
+ public abstract String getStack();
+
+ /**
* Calculate the service specific directories.
*/
- protected abstract void calculateDirectories();
+ protected void calculateDirectories(String stack, String service) {
+ calculatePackageDirectory(stack, service);
+ calculateUpgradesDirectory(stack, service);
+ calculateChecksDirectory(stack, service);
+ }
+
+ /**
+ * @param directoryName
+ * @param stack
+ * @param service
+ * @return the directory if it exists and is not empty
+ */
+ protected File resolveDirectory(String directoryName, String stack, String service) {
+ File directory = new File(getAbsolutePath() + File.separator + directoryName);
+ if (directory.isDirectory()) {
+ String[] files = directory.list();
+ // NOTE(review): File.list() returns null on an I/O error, not an empty array
+ int fileCount = (files == null) ? 0 : files.length;
+ if (fileCount > 0) {
+ LOG.debug("Service {} folder for service {} in {} has been resolved to {}", directoryName, service, stack, directory);
+ return directory;
+ }
+ else {
+ LOG.debug("Service folder {} is empty.", directory);
+ }
+ }
+ else {
+ LOG.debug("Service folder {} does not exist.", directory);
+ }
+ return null;
+ }
+
+ /**
+ * @param directoryName
+ * @param stack
+ * @param service
+ * @return the relative path of the directory if it exists and is not empty
+ */
+ protected String resolveRelativeDirectoryPathString(File resourcesDir, String directoryName, String stack, String service) {
+ File dir = resolveDirectory(directoryName, stack, service);
+ if (dir != null) {
+ return dir.getPath().substring(resourcesDir.getPath().length() + 1);
+ }
+ return null;
+ }
+
+ /**
+ * @return the resources directory
+ */
+ protected abstract File getResourcesDirectory();
+
+ /**
+ * Sets the packageDir if the path exists and is not empty
+ * @param stack
+ * @param service
+ */
+ protected void calculatePackageDirectory(String stack, String service) {
+ packageDir = resolveRelativeDirectoryPathString(getResourcesDirectory(), PACKAGE_FOLDER_NAME, stack, service);
+
+ }
+
+ /**
+ * Sets the upgradesDir if the dir exists and is not empty
+ * @param stack
+ * @param service
+ */
+ protected void calculateUpgradesDirectory(String stack, String service) {
+ upgradesDir = resolveDirectory(UPGRADES_FOLDER_NAME, stack, service);
+ }
+
+ /**
+ * Sets the checksDir if the dir exists and is not empty
+ * @param stack
+ * @param service
+ */
+ protected void calculateChecksDirectory(String stack, String service) {
+ checksDir = resolveDirectory(CHECKS_FOLDER_NAME, stack, service);
+ }
/**
* Unmarshal the metainfo file into its object representation.
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
index a77a22f..650bdf1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ServiceModule.java
@@ -142,6 +142,7 @@ public class ServiceModule extends BaseModule<ServiceModule, ServiceInfo> implem
serviceInfo.setSchemaVersion(AmbariMetaInfo.SCHEMA_VERSION_2);
serviceInfo.setServicePackageFolder(serviceDirectory.getPackageDir());
serviceInfo.setServiceUpgradesFolder(serviceDirectory.getUpgradesDir());
+ serviceInfo.setChecksFolder(serviceDirectory.getChecksDir());
serviceInfo.setAdvisorFile(serviceDirectory.getAdvisorFile());
serviceInfo.setAdvisorName(serviceDirectory.getAdvisorName(serviceInfo.getName()));
@@ -253,6 +254,9 @@ public class ServiceModule extends BaseModule<ServiceModule, ServiceInfo> implem
if (serviceInfo.getRoleCommandOrder() == null) {
serviceInfo.setRoleCommandOrder(parent.getRoleCommandOrder());
}
+ if (serviceInfo.getChecksFolder() == null) {
+ serviceInfo.setChecksFolder(parent.getChecksFolder());
+ }
mergeCustomCommands(parent.getCustomCommands(), serviceInfo.getCustomCommands());
mergeConfigDependencies(parent);
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
index 8656896..611b6bd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
@@ -28,12 +28,19 @@ import org.apache.ambari.server.state.stack.RepositoryXml;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
/**
* Encapsulates IO operations on a stack service directory.
*/
public class StackServiceDirectory extends ServiceDirectory {
/**
+ * logger instance
+ */
+ private static final Logger LOG = LoggerFactory.getLogger(StackServiceDirectory.class);
+
+
+ /**
* repository file
*/
@Nullable
@@ -45,12 +52,6 @@ public class StackServiceDirectory extends ServiceDirectory {
@Nullable
private String repoDir;
-
- /**
- * logger instance
- */
- private static final Logger LOG = LoggerFactory.getLogger(StackServiceDirectory.class);
-
/**
* Constructor.
*
@@ -61,7 +62,6 @@ public class StackServiceDirectory extends ServiceDirectory {
super(servicePath);
}
-
/**
* Obtain the repository xml file if exists or null
*
@@ -82,7 +82,6 @@ public class StackServiceDirectory extends ServiceDirectory {
return repoDir;
}
-
@Override
/**
* Obtain the advisor name.
@@ -129,55 +128,35 @@ public class StackServiceDirectory extends ServiceDirectory {
@Override
/**
- * Calculate the stack service directories.
- * packageDir Format: stacks/<stackName>/<stackVersion>/services/<serviceName>/package
- * Example:
- * directory: "/var/lib/ambari-server/resources/stacks/HDP/2.0.6/services/HDFS"
- * packageDir: "stacks/HDP/2.0.6/services/HDFS/package"
+ * @return the resources directory
*/
- protected void calculateDirectories() {
+ protected File getResourcesDirectory() {
File serviceDir = new File(getAbsolutePath());
- File stackVersionDir = serviceDir.getParentFile().getParentFile();
- File stackDir = stackVersionDir.getParentFile();
+ return serviceDir.getParentFile().getParentFile().getParentFile().getParentFile().getParentFile();
+ }
- String stackId = String.format("%s-%s", stackDir.getName(), stackVersionDir.getName());
- File absPackageDir = new File(getAbsolutePath() + File.separator + PACKAGE_FOLDER_NAME);
- if (absPackageDir.isDirectory()) {
- String[] files = absPackageDir.list();
- int fileCount = files.length;
- if (fileCount > 0) {
- packageDir = absPackageDir.getPath().substring(stackDir.getParentFile().getParentFile().getPath().length() + 1);
- LOG.debug("Service package folder for service {} for stack {} has been resolved to {}",
- serviceDir.getName(), stackId, packageDir);
- }
- else {
- LOG.debug("Service package folder {} for service {} for stack {} is empty.",
- absPackageDir, serviceDir.getName(), stackId);
- }
- } else {
- LOG.debug("Service package folder {} for service {} for stack {} does not exist.",
- absPackageDir, serviceDir.getName(), stackId);
- }
+ @Override
+ /**
+ * @return the service name (will be used for logging purposes by superclass)
+ */
+ public String getService() {
+ File serviceDir = new File(getAbsolutePath());
- File absUpgradesDir = new File(getAbsolutePath() + File.separator + UPGRADES_FOLDER_NAME);
- if (absUpgradesDir.isDirectory()) {
- String[] files = absUpgradesDir.list();
- int fileCount = files.length;
- if (fileCount > 0) {
- upgradesDir = absUpgradesDir;
- LOG.debug("Service upgrades folder for service {} for stack {} has been resolved to {}",
- serviceDir.getName(), stackId, packageDir);
- }
- else {
- LOG.debug("Service upgrades folder {} for service {} for stack {} is empty.",
- absUpgradesDir, serviceDir.getName(), stackId);
- }
- } else {
- LOG.debug("Service upgrades folder {} for service {} for stack {} does not exist.",
- absUpgradesDir, serviceDir.getName(), stackId);
- }
+ return serviceDir.getName();
}
+ @Override
+ /**
+ * @return the stack name-version (will be used for logging purposes by superclass)
+ */
+ public String getStack() {
+ File serviceDir = new File(getAbsolutePath());
+ File stackVersionDir = serviceDir.getParentFile().getParentFile();
+ File stackDir = stackVersionDir.getParentFile();
+
+ String stackId = String.format("%s-%s", stackDir.getName(), stackVersionDir.getName());
+ return stackId;
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
index 6fda8bc..b0d81c3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
@@ -270,6 +270,12 @@ public class ServiceInfo implements Validable{
@XmlTransient
private File serviceUpgradesFolder;
+ /**
+ * Stores the path to the checks folder which contains prereq check jars for the given service.
+ */
+ @XmlTransient
+ private File checksFolder;
+
public boolean isDeleted() {
return isDeleted;
}
@@ -600,6 +606,14 @@ public String getVersion() {
this.serviceUpgradesFolder = serviceUpgradesFolder;
}
+ public File getChecksFolder() {
+ return checksFolder;
+ }
+
+ public void setChecksFolder(File checksFolder) {
+ this.checksFolder = checksFolder;
+ }
+
/**
* Exposes (and initializes on first use) map of os-specific details.
* @return map of OS specific details keyed by family
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java
new file mode 100644
index 0000000..6a0fa12
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/PreUpgradeCheckResourceProviderTest.java
@@ -0,0 +1,255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.ActionDBAccessor;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.checks.AbstractCheckDescriptor;
+import org.apache.ambari.server.checks.UpgradeCheckRegistry;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.MaintenanceStateHelper;
+import org.apache.ambari.server.controller.RequestStatusResponse;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.events.jpa.EntityManagerCacheInvalidationEvent;
+import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.scheduler.ExecutionScheduler;
+import org.apache.ambari.server.stack.StackManagerFactory;
+import org.apache.ambari.server.state.CheckHelper;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.UpgradeHelper;
+import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrereqCheckType;
+import org.apache.ambari.server.state.stack.UpgradePack;
+import org.apache.ambari.server.state.stack.UpgradePack.PrerequisiteCheckConfig;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.File;
+import java.lang.reflect.Field;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import javax.persistence.EntityManager;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Provider;
+
+import static org.easymock.EasyMock.anyBoolean;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.isNull;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+/**
+ * PreUpgradeCheckResourceProvider tests.
+ */
+public class PreUpgradeCheckResourceProviderTest {
+
+ @Test
+ public void testGetResources() throws Exception{
+ Injector injector = createInjector();
+ AmbariManagementController managementController = injector.getInstance(AmbariManagementController.class);
+
+ Clusters clusters = injector.getInstance(Clusters.class);
+ UpgradeHelper upgradeHelper = injector.getInstance(UpgradeHelper.class);
+
+ RepositoryVersionDAO repoDao = injector.getInstance(RepositoryVersionDAO.class);
+ RepositoryVersionEntity repo = createNiceMock(RepositoryVersionEntity.class);
+ UpgradePack upgradePack = createNiceMock(UpgradePack.class);
+ PrerequisiteCheckConfig config = createNiceMock(PrerequisiteCheckConfig.class);
+
+ Cluster cluster = createNiceMock(Cluster.class);
+ Service service = createNiceMock(Service.class);
+ ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
+
+ StackId currentStackId = createNiceMock(StackId.class);
+ StackId targetStackId = createNiceMock(StackId.class);
+ ServiceFactory serviceFactory = createNiceMock(ServiceFactory.class);
+ AmbariMetaInfo ambariMetaInfo = createNiceMock(AmbariMetaInfo.class);
+
+ Map<String, Service> allServiceMap = new HashMap<String, Service>();
+ allServiceMap.put("Service100", service);
+ Map<String, ServiceInfo> allServiceInfoMap = new HashMap<String, ServiceInfo>();
+ allServiceInfoMap.put("Service100", serviceInfo);
+
+ // set expectations
+ expect(managementController.getClusters()).andReturn(clusters).anyTimes();
+ expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();
+ expect(managementController.getServiceFactory()).andReturn(serviceFactory).anyTimes();
+
+ expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
+ expect(cluster.getServices()).andReturn(allServiceMap).anyTimes();
+ expect(cluster.getService("Service100")).andReturn(service).anyTimes();
+ expect(cluster.getCurrentStackVersion()).andReturn(currentStackId).anyTimes();
+
+ expect(currentStackId.getStackName()).andReturn("Stack100").anyTimes();
+ expect(currentStackId.getStackVersion()).andReturn("1.0").anyTimes();
+ expect(targetStackId.getStackName()).andReturn("Stack100").anyTimes();
+ expect(targetStackId.getStackVersion()).andReturn("1.1").anyTimes();
+
+ expect(repoDao.findByStackNameAndVersion("Stack100", "Repo100")).andReturn(repo).anyTimes();
+ expect(repo.getStackId()).andReturn(targetStackId).atLeastOnce();
+ expect(upgradeHelper.suggestUpgradePack("Cluster100", "1.0", "Repo100", Direction.UPGRADE, UpgradeType.NON_ROLLING, "upgrade_pack11")).andReturn(upgradePack);
+
+ List<AbstractCheckDescriptor> upgradeChecksToRun = new LinkedList<AbstractCheckDescriptor>();
+ List<String> prerequisiteChecks = new LinkedList<String>();
+ prerequisiteChecks.add("org.apache.ambari.server.sample.checks.SampleServiceCheck");
+ expect(upgradePack.getPrerequisiteCheckConfig()).andReturn(config);
+ expect(upgradePack.getPrerequisiteChecks()).andReturn(prerequisiteChecks).anyTimes();
+ expect(upgradePack.getTarget()).andReturn("1.1").anyTimes();
+
+ expect(ambariMetaInfo.getServices("Stack100", "1.1")).andReturn(allServiceInfoMap).anyTimes();
+ String checks = ClassLoader.getSystemClassLoader().getResource("checks").getPath();
+ expect(serviceInfo.getChecksFolder()).andReturn(new File(checks));
+
+ // replay
+ replay(managementController, clusters, cluster, service, serviceInfo, repoDao, repo, upgradeHelper,
+ ambariMetaInfo, upgradePack, config, currentStackId, targetStackId, serviceFactory);
+
+ ResourceProvider provider = getPreUpgradeCheckResourceProvider(managementController, injector);
+ // create the request
+ Request request = PropertyHelper.getReadRequest(new HashSet<String>());
+ PredicateBuilder builder = new PredicateBuilder();
+ Predicate predicate = builder.property(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
+ .property(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_UPGRADE_PACK_PROPERTY_ID).equals("upgrade_pack11").and()
+ .property(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_UPGRADE_TYPE_PROPERTY_ID).equals(UpgradeType.NON_ROLLING).and()
+ .property(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_REPOSITORY_VERSION_PROPERTY_ID).equals("Repo100").toPredicate();
+
+
+ System.out.println("PreUpgradeCheckResourceProvider - " + provider);
+ Set<Resource> resources = Collections.emptySet();
+ try {
+ resources = provider.getResources(request, predicate);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ Assert.assertEquals(1, resources.size());
+ for (Resource resource : resources) {
+ String id = (String) resource.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_ID_PROPERTY_ID);
+ Assert.assertEquals("SAMPLE_SERVICE_CHECK", id);
+ String description = (String) resource.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_CHECK_PROPERTY_ID);
+ Assert.assertEquals("Sample service check description.", description);
+ PrereqCheckStatus status = (PrereqCheckStatus) resource.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_STATUS_PROPERTY_ID);
+ Assert.assertEquals(PrereqCheckStatus.FAIL, status);
+ String reason = (String) resource.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_REASON_PROPERTY_ID);
+ Assert.assertEquals("Sample service check always fails.", reason);
+ PrereqCheckType checkType = (PrereqCheckType) resource.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_CHECK_TYPE_PROPERTY_ID);
+ Assert.assertEquals(PrereqCheckType.HOST, checkType);
+ String clusterName = (String) resource.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_CLUSTER_NAME_PROPERTY_ID);
+ Assert.assertEquals("Cluster100", clusterName);
+ UpgradeType upgradeType = (UpgradeType) resource.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_UPGRADE_TYPE_PROPERTY_ID);
+ Assert.assertEquals(UpgradeType.NON_ROLLING, upgradeType);
+ }
+
+ // verify
+ verify(managementController, clusters, cluster, service, serviceInfo, repoDao, repo, upgradeHelper,
+ ambariMetaInfo, upgradePack, config, currentStackId, targetStackId, serviceFactory);
+ }
+
+ /**
+ * This factory method creates PreUpgradeCheckResourceProvider using the mock managementController
+ */
+ public PreUpgradeCheckResourceProvider getPreUpgradeCheckResourceProvider(AmbariManagementController managementController, Injector injector) throws AmbariException {
+ //UpgradeHelper upgradeHelper = injector.getInstance(UpgradeHelper.class);
+ //injector.injectMembers(upgradeHelper);
+ PreUpgradeCheckResourceProvider provider = new PreUpgradeCheckResourceProvider(managementController);
+ return provider;
+ }
+
+ static class TestClustersProvider implements Provider<Clusters> {
+ private static Clusters clusters = createNiceMock(Clusters.class);
+
+ @Override
+ public Clusters get() {
+ return clusters;
+ }
+ }
+
+ static class TestUpgradeHelperProvider implements Provider<UpgradeHelper> {
+ private static UpgradeHelper upgradeHelper = createNiceMock(UpgradeHelper.class);
+
+ @Override
+ public UpgradeHelper get() {
+ return upgradeHelper;
+ }
+ }
+
+ private Injector createInjector() throws Exception {
+ return Guice.createInjector(new AbstractModule() {
+ @Override
+ protected void configure() {
+ Provider<Clusters> clustersProvider = new TestClustersProvider();
+ Provider<UpgradeHelper> upgradeHelperProvider = new TestUpgradeHelperProvider();
+ CheckHelper checkHelper = new CheckHelper();
+ UpgradeCheckRegistry registry = new UpgradeCheckRegistry();
+
+ bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementController.class));
+ bind(CheckHelper.class).toInstance(checkHelper);
+ bind(Clusters.class).toProvider(TestClustersProvider.class);
+ bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+ bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+ bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+ bind(RepositoryVersionDAO.class).toInstance(createNiceMock(RepositoryVersionDAO.class));
+ bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+ bind(UpgradeCheckRegistry.class).toInstance(registry);
+ bind(UpgradeHelper.class).toProvider(TestUpgradeHelperProvider.class);
+
+ requestStaticInjection(PreUpgradeCheckResourceProvider.class);
+ }
+ });
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java b/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
new file mode 100644
index 0000000..c91793e
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
@@ -0,0 +1,35 @@
+package org.apache.ambari.server.sample.checks;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.checks.AbstractCheckDescriptor;
+import org.apache.ambari.server.checks.CheckDescription;
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrereqCheckType;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+
+import com.google.common.collect.ImmutableMap;
+
+public class SampleServiceCheck extends AbstractCheckDescriptor {
+
+ public SampleServiceCheck() {
+ super(new CheckDescription("SAMPLE_SERVICE_CHECK",
+ PrereqCheckType.HOST,
+ "Sample service check description.",
+ new ImmutableMap.Builder<String, String>()
+ .put(AbstractCheckDescriptor.DEFAULT,
+ "Sample service check default property description.").build()));
+ }
+
+ @Override
+ public void perform(PrerequisiteCheck prerequisiteCheck, PrereqCheckRequest request) throws AmbariException {
+ prerequisiteCheck.setFailReason("Sample service check always fails.");
+ prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
+ }
+
+ @Override
+ public boolean isStackUpgradeAllowedToBypassPreChecks() {
+ return false;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
index 304fd5c..a9a8fdb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
@@ -437,6 +437,36 @@ public class ServiceModuleTest {
}
@Test
+ public void testResolve_UpgradeCheckDirectory() throws Exception {
+ File checks = new File("checks");
+
+ // check directory specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ ServiceModule child = createServiceModule(info);
+ ServiceModule parent = createServiceModule(parentInfo);
+ child.getModuleInfo().setChecksFolder(checks);
+ resolveService(child, parent);
+ assertEquals(checks.getPath(), child.getModuleInfo().getChecksFolder().getPath());
+
+ // check directory specified in parent only
+ child = createServiceModule(info);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setChecksFolder(checks);
+ resolveService(child, parent);
+ assertEquals(checks.getPath(), child.getModuleInfo().getChecksFolder().getPath());
+
+ // check directory set in both
+ info.setChecksFolder(checks);
+ child = createServiceModule(info);
+ child.getModuleInfo().setChecksFolder(checks);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setChecksFolder(new File("other"));
+ resolveService(child, parent);
+ assertEquals(checks.getPath(), child.getModuleInfo().getChecksFolder().getPath());
+ }
+
+ @Test
public void testResolve_CustomCommands() throws Exception {
List<CustomCommandDefinition> customCommands = new ArrayList<CustomCommandDefinition>();
CustomCommandDefinition cmd1 = new CustomCommandDefinition();
http://git-wip-us.apache.org/repos/asf/ambari/blob/87423d64/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
index 659ae12..044f2c4 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
@@ -29,6 +29,7 @@ import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
+import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -105,6 +106,9 @@ public class StackManagerExtensionTest {
assertNotNull("Package dir is " + oozie.getServicePackageFolder(), oozie.getServicePackageFolder());
assertTrue("Package dir is " + oozie.getServicePackageFolder(), oozie.getServicePackageFolder().contains("extensions/EXT/0.1/services/OOZIE2/package"));
assertEquals(oozie.getVersion(), "3.2.0");
+ File checks = oozie.getChecksFolder();
+ assertNotNull(checks);
+ assertTrue("Checks dir is " + checks.getPath(), checks.getPath().contains("extensions/EXT/0.1/services/OOZIE2/checks"));
extension = stackManager.getExtension("EXT", "0.2");
assertNotNull("EXT 0.2's parent: " + extension.getParentExtensionVersion(), extension.getParentExtensionVersion());
@@ -114,6 +118,9 @@ public class StackManagerExtensionTest {
assertNotNull("Package dir is " + oozie.getServicePackageFolder(), oozie.getServicePackageFolder());
assertTrue("Package dir is " + oozie.getServicePackageFolder(), oozie.getServicePackageFolder().contains("extensions/EXT/0.1/services/OOZIE2/package"));
assertEquals(oozie.getVersion(), "4.0.0");
+ checks = oozie.getChecksFolder();
+ assertNotNull(checks);
+ assertTrue("Checks dir is " + checks.getPath(), checks.getPath().contains("extensions/EXT/0.1/services/OOZIE2/checks"));
StackInfo stack = stackManager.getStack("HDP", "0.2");
assertNotNull(stack.getService("OOZIE2"));
[11/19] ambari git commit: AMBARI-18471. Refactor yarn() function in
YARN service. Part 1. (aonishuk)
Posted by nc...@apache.org.
AMBARI-18471. Refactor yarn() function in YARN service. Part 1. (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6fb1ceef
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6fb1ceef
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6fb1ceef
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6fb1ceef66b8d63c66603eebf1cd9e6d91c815aa
Parents: c265ae6
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Sep 28 08:44:44 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Sep 28 08:44:44 2016 +0300
----------------------------------------------------------------------
.../2.1.0.2.0/package/scripts/params_linux.py | 7 +
.../YARN/2.1.0.2.0/package/scripts/yarn.py | 548 +++++++++----------
.../stacks/2.0.6/YARN/test_historyserver.py | 1 -
.../stacks/2.0.6/YARN/test_resourcemanager.py | 54 +-
.../stacks/2.1/YARN/test_apptimelineserver.py | 40 +-
.../test/python/stacks/2.3/YARN/test_ats_1_5.py | 188 +++----
6 files changed, 421 insertions(+), 417 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb1ceef/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 4d42861..0d46069 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -193,6 +193,13 @@ else:
rm_webui_address = format("{rm_host}:{rm_port}")
rm_webui_https_address = format("{rm_host}:{rm_https_port}")
+if security_enabled:
+ tc_mode = 0644
+ tc_owner = "root"
+else:
+ tc_mode = None
+ tc_owner = hdfs_user
+
nm_webui_address = config['configurations']['yarn-site']['yarn.nodemanager.webapp.address']
hs_webui_address = config['configurations']['mapred-site']['mapreduce.jobhistory.webapp.address']
nm_address = config['configurations']['yarn-site']['yarn.nodemanager.address'] # still contains 0.0.0.0
http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb1ceef/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index a3a3a06..70ed5b3 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -37,63 +37,6 @@ from ambari_commons import OSConst
from resource_management.libraries.functions.mounted_dirs_helper import handle_mounted_dirs
-# Local Imports
-
-
-@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
-def yarn(name = None):
- import params
- XmlConfig("mapred-site.xml",
- conf_dir=params.config_dir,
- configurations=params.config['configurations']['mapred-site'],
- owner=params.yarn_user,
- mode='f'
- )
- XmlConfig("yarn-site.xml",
- conf_dir=params.config_dir,
- configurations=params.config['configurations']['yarn-site'],
- owner=params.yarn_user,
- mode='f',
- configuration_attributes=params.config['configuration_attributes']['yarn-site']
- )
- XmlConfig("capacity-scheduler.xml",
- conf_dir=params.config_dir,
- configurations=params.config['configurations']['capacity-scheduler'],
- owner=params.yarn_user,
- mode='f'
- )
-
- if params.service_map.has_key(name):
- service_name = params.service_map[name]
-
- ServiceConfig(service_name,
- action="change_user",
- username = params.yarn_user,
- password = Script.get_password(params.yarn_user))
-
-def create_log_dir(dir_name):
- import params
- Directory(dir_name,
- create_parents = True,
- cd_access="a",
- mode=0775,
- owner=params.yarn_user,
- group=params.user_group,
- ignore_failures=True,
- )
-
-def create_local_dir(dir_name):
- import params
- Directory(dir_name,
- create_parents = True,
- cd_access="a",
- mode=0755,
- owner=params.yarn_user,
- group=params.user_group,
- ignore_failures=True,
- recursive_mode_flags = {'f': 'a+rw', 'd': 'a+rwx'},
- )
-
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def yarn(name=None, config_dir=None):
"""
@@ -102,107 +45,18 @@ def yarn(name=None, config_dir=None):
"""
import params
+ if name == 'resourcemanager':
+ setup_resourcemanager()
+ elif name == 'nodemanager':
+ setup_nodemanager()
+ elif name == 'apptimelineserver':
+ setup_ats()
+ elif name == 'historyserver':
+ setup_historyserver()
+
if config_dir is None:
config_dir = params.hadoop_conf_dir
- if name == "historyserver":
- if params.yarn_log_aggregation_enabled:
- params.HdfsResource(params.yarn_nm_app_log_dir,
- action="create_on_execute",
- type="directory",
- owner=params.yarn_user,
- group=params.user_group,
- mode=01777,
- recursive_chmod=True
- )
-
- # create the /tmp folder with proper permissions if it doesn't exist yet
- if params.entity_file_history_directory.startswith('/tmp'):
- params.HdfsResource(params.hdfs_tmp_dir,
- action="create_on_execute",
- type="directory",
- owner=params.hdfs_user,
- mode=0777,
- )
-
- params.HdfsResource(params.entity_file_history_directory,
- action="create_on_execute",
- type="directory",
- owner=params.yarn_user,
- group=params.user_group
- )
- params.HdfsResource("/mapred",
- type="directory",
- action="create_on_execute",
- owner=params.mapred_user
- )
- params.HdfsResource("/mapred/system",
- type="directory",
- action="create_on_execute",
- owner=params.hdfs_user
- )
- params.HdfsResource(params.mapreduce_jobhistory_done_dir,
- type="directory",
- action="create_on_execute",
- owner=params.mapred_user,
- group=params.user_group,
- change_permissions_for_parents=True,
- mode=0777
- )
- params.HdfsResource(None, action="execute")
- Directory(params.jhs_leveldb_state_store_dir,
- owner=params.mapred_user,
- group=params.user_group,
- create_parents = True,
- cd_access="a",
- recursive_ownership = True,
- )
-
- #<editor-fold desc="Node Manager Section">
- if name == "nodemanager":
-
- # First start after enabling/disabling security
- if params.toggle_nm_security:
- Directory(params.nm_local_dirs_list + params.nm_log_dirs_list,
- action='delete'
- )
-
- # If yarn.nodemanager.recovery.dir exists, remove this dir
- if params.yarn_nodemanager_recovery_dir:
- Directory(InlineTemplate(params.yarn_nodemanager_recovery_dir).get_content(),
- action='delete'
- )
-
- # Setting NM marker file
- if params.security_enabled:
- Directory(params.nm_security_marker_dir)
- File(params.nm_security_marker,
- content="Marker file to track first start after enabling/disabling security. "
- "During first start yarn local, log dirs are removed and recreated"
- )
- elif not params.security_enabled:
- File(params.nm_security_marker, action="delete")
-
-
- if not params.security_enabled or params.toggle_nm_security:
- # handle_mounted_dirs ensures that we don't create dirs which are temporary unavailable (unmounted), and intended to reside on a different mount.
- nm_log_dir_to_mount_file_content = handle_mounted_dirs(create_log_dir, params.nm_log_dirs, params.nm_log_dir_to_mount_file, params)
- # create a history file used by handle_mounted_dirs
- File(params.nm_log_dir_to_mount_file,
- owner=params.hdfs_user,
- group=params.user_group,
- mode=0644,
- content=nm_log_dir_to_mount_file_content
- )
- nm_local_dir_to_mount_file_content = handle_mounted_dirs(create_local_dir, params.nm_local_dirs, params.nm_local_dir_to_mount_file, params)
- File(params.nm_local_dir_to_mount_file,
- owner=params.hdfs_user,
- group=params.user_group,
- mode=0644,
- content=nm_local_dir_to_mount_file_content
- )
- #</editor-fold>
-
if params.yarn_nodemanager_recovery_dir:
Directory(InlineTemplate(params.yarn_nodemanager_recovery_dir).get_content(),
owner=params.yarn_user,
@@ -245,15 +99,14 @@ def yarn(name=None, config_dir=None):
# During RU, Core Masters and Slaves need hdfs-site.xml
# TODO, instead of specifying individual configs, which is susceptible to breaking when new configs are added,
# RU should rely on all available in <stack-root>/<version>/hadoop/conf
- if 'hdfs-site' in params.config['configurations']:
- XmlConfig("hdfs-site.xml",
- conf_dir=config_dir,
- configurations=params.config['configurations']['hdfs-site'],
- configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
- owner=params.hdfs_user,
- group=params.user_group,
- mode=0644
- )
+ XmlConfig("hdfs-site.xml",
+ conf_dir=config_dir,
+ configurations=params.config['configurations']['hdfs-site'],
+ configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
+ owner=params.hdfs_user,
+ group=params.user_group,
+ mode=0644
+ )
XmlConfig("mapred-site.xml",
conf_dir=config_dir,
@@ -282,85 +135,6 @@ def yarn(name=None, config_dir=None):
mode=0644
)
- if name == 'resourcemanager':
- Directory(params.rm_nodes_exclude_dir,
- mode=0755,
- create_parents=True,
- cd_access='a',
- )
- File(params.rm_nodes_exclude_path,
- owner=params.yarn_user,
- group=params.user_group
- )
- File(params.yarn_job_summary_log,
- owner=params.yarn_user,
- group=params.user_group
- )
- if not is_empty(params.node_label_enable) and params.node_label_enable or is_empty(params.node_label_enable) and params.node_labels_dir:
- params.HdfsResource(params.node_labels_dir,
- type="directory",
- action="create_on_execute",
- change_permissions_for_parents=True,
- owner=params.yarn_user,
- group=params.user_group,
- mode=0700
- )
- params.HdfsResource(None, action="execute")
-
-
- elif name == 'apptimelineserver':
- Directory(params.ats_leveldb_dir,
- owner=params.yarn_user,
- group=params.user_group,
- create_parents = True,
- cd_access="a",
- )
-
- # if stack support application timeline-service state store property (timeline_state_store stack feature)
- if params.stack_supports_timeline_state_store:
- Directory(params.ats_leveldb_state_store_dir,
- owner=params.yarn_user,
- group=params.user_group,
- create_parents = True,
- cd_access="a",
- )
- # app timeline server 1.5 directories
- if not is_empty(params.entity_groupfs_store_dir):
- parent_path = os.path.dirname(params.entity_groupfs_store_dir)
- params.HdfsResource(parent_path,
- type="directory",
- action="create_on_execute",
- change_permissions_for_parents=True,
- owner=params.yarn_user,
- group=params.user_group,
- mode=0755
- )
- params.HdfsResource(params.entity_groupfs_store_dir,
- type="directory",
- action="create_on_execute",
- owner=params.yarn_user,
- group=params.user_group,
- mode=params.entity_groupfs_store_dir_mode
- )
- if not is_empty(params.entity_groupfs_active_dir):
- parent_path = os.path.dirname(params.entity_groupfs_active_dir)
- params.HdfsResource(parent_path,
- type="directory",
- action="create_on_execute",
- change_permissions_for_parents=True,
- owner=params.yarn_user,
- group=params.user_group,
- mode=0755
- )
- params.HdfsResource(params.entity_groupfs_active_dir,
- type="directory",
- action="create_on_execute",
- owner=params.yarn_user,
- group=params.user_group,
- mode=params.entity_groupfs_active_dir_mode
- )
- params.HdfsResource(None, action="execute")
-
File(format("{limits_conf_dir}/yarn.conf"),
mode=0644,
content=Template('yarn.conf.j2')
@@ -378,8 +152,7 @@ def yarn(name=None, config_dir=None):
content=InlineTemplate(params.yarn_env_sh_template)
)
- container_executor = format("{yarn_container_bin}/container-executor")
- File(container_executor,
+ File(format("{yarn_container_bin}/container-executor"),
group=params.yarn_executor_container_group,
mode=params.container_executor_mode
)
@@ -396,15 +169,8 @@ def yarn(name=None, config_dir=None):
mode=0755,
cd_access="a")
- if params.security_enabled:
- tc_mode = 0644
- tc_owner = "root"
- else:
- tc_mode = None
- tc_owner = params.hdfs_user
-
File(os.path.join(config_dir, "mapred-env.sh"),
- owner=tc_owner,
+ owner=params.tc_owner,
mode=0755,
content=InlineTemplate(params.mapred_env_sh_template)
)
@@ -416,35 +182,34 @@ def yarn(name=None, config_dir=None):
mode=06050
)
File(os.path.join(config_dir, 'taskcontroller.cfg'),
- owner = tc_owner,
- mode = tc_mode,
+ owner = params.tc_owner,
+ mode = params.tc_mode,
group = params.mapred_tt_group,
content=Template("taskcontroller.cfg.j2")
)
else:
File(os.path.join(config_dir, 'taskcontroller.cfg'),
- owner=tc_owner,
+ owner=params.tc_owner,
content=Template("taskcontroller.cfg.j2")
)
- if "mapred-site" in params.config['configurations']:
- XmlConfig("mapred-site.xml",
- conf_dir=config_dir,
- configurations=params.config['configurations']['mapred-site'],
- configuration_attributes=params.config['configuration_attributes']['mapred-site'],
- owner=params.mapred_user,
- group=params.user_group
- )
+ XmlConfig("mapred-site.xml",
+ conf_dir=config_dir,
+ configurations=params.config['configurations']['mapred-site'],
+ configuration_attributes=params.config['configuration_attributes']['mapred-site'],
+ owner=params.mapred_user,
+ group=params.user_group
+ )
+
+ XmlConfig("capacity-scheduler.xml",
+ conf_dir=config_dir,
+ configurations=params.config['configurations'][
+ 'capacity-scheduler'],
+ configuration_attributes=params.config['configuration_attributes']['capacity-scheduler'],
+ owner=params.hdfs_user,
+ group=params.user_group
+ )
- if "capacity-scheduler" in params.config['configurations']:
- XmlConfig("capacity-scheduler.xml",
- conf_dir=config_dir,
- configurations=params.config['configurations'][
- 'capacity-scheduler'],
- configuration_attributes=params.config['configuration_attributes']['capacity-scheduler'],
- owner=params.hdfs_user,
- group=params.user_group
- )
if "ssl-client" in params.config['configurations']:
XmlConfig("ssl-client.xml",
conf_dir=config_dir,
@@ -495,4 +260,239 @@ def yarn(name=None, config_dir=None):
File(os.path.join(config_dir, 'ssl-server.xml.example'),
owner=params.mapred_user,
group=params.user_group
- )
\ No newline at end of file
+ )
+
+def setup_historyserver():
+ import params
+
+ if params.yarn_log_aggregation_enabled:
+ params.HdfsResource(params.yarn_nm_app_log_dir,
+ action="create_on_execute",
+ type="directory",
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=01777,
+ recursive_chmod=True
+ )
+
+ # create the /tmp folder with proper permissions if it doesn't exist yet
+ if params.entity_file_history_directory.startswith('/tmp'):
+ params.HdfsResource(params.hdfs_tmp_dir,
+ action="create_on_execute",
+ type="directory",
+ owner=params.hdfs_user,
+ mode=0777,
+ )
+
+ params.HdfsResource(params.entity_file_history_directory,
+ action="create_on_execute",
+ type="directory",
+ owner=params.yarn_user,
+ group=params.user_group
+ )
+ params.HdfsResource("/mapred",
+ type="directory",
+ action="create_on_execute",
+ owner=params.mapred_user
+ )
+ params.HdfsResource("/mapred/system",
+ type="directory",
+ action="create_on_execute",
+ owner=params.hdfs_user
+ )
+ params.HdfsResource(params.mapreduce_jobhistory_done_dir,
+ type="directory",
+ action="create_on_execute",
+ owner=params.mapred_user,
+ group=params.user_group,
+ change_permissions_for_parents=True,
+ mode=0777
+ )
+ params.HdfsResource(None, action="execute")
+ Directory(params.jhs_leveldb_state_store_dir,
+ owner=params.mapred_user,
+ group=params.user_group,
+ create_parents = True,
+ cd_access="a",
+ recursive_ownership = True,
+ )
+
+def setup_nodemanager():
+ import params
+
+ # First start after enabling/disabling security
+ if params.toggle_nm_security:
+ Directory(params.nm_local_dirs_list + params.nm_log_dirs_list,
+ action='delete'
+ )
+
+ # If yarn.nodemanager.recovery.dir exists, remove this dir
+ if params.yarn_nodemanager_recovery_dir:
+ Directory(InlineTemplate(params.yarn_nodemanager_recovery_dir).get_content(),
+ action='delete'
+ )
+
+ # Setting NM marker file
+ if params.security_enabled:
+ Directory(params.nm_security_marker_dir)
+ File(params.nm_security_marker,
+ content="Marker file to track first start after enabling/disabling security. "
+ "During first start yarn local, log dirs are removed and recreated"
+ )
+ elif not params.security_enabled:
+ File(params.nm_security_marker, action="delete")
+
+
+ if not params.security_enabled or params.toggle_nm_security:
+ # handle_mounted_dirs ensures that we don't create dirs which are temporary unavailable (unmounted), and intended to reside on a different mount.
+ nm_log_dir_to_mount_file_content = handle_mounted_dirs(create_log_dir, params.nm_log_dirs, params.nm_log_dir_to_mount_file, params)
+ # create a history file used by handle_mounted_dirs
+ File(params.nm_log_dir_to_mount_file,
+ owner=params.hdfs_user,
+ group=params.user_group,
+ mode=0644,
+ content=nm_log_dir_to_mount_file_content
+ )
+ nm_local_dir_to_mount_file_content = handle_mounted_dirs(create_local_dir, params.nm_local_dirs, params.nm_local_dir_to_mount_file, params)
+ File(params.nm_local_dir_to_mount_file,
+ owner=params.hdfs_user,
+ group=params.user_group,
+ mode=0644,
+ content=nm_local_dir_to_mount_file_content
+ )
+
+def setup_resourcemanager():
+ import params
+
+ Directory(params.rm_nodes_exclude_dir,
+ mode=0755,
+ create_parents=True,
+ cd_access='a',
+ )
+ File(params.rm_nodes_exclude_path,
+ owner=params.yarn_user,
+ group=params.user_group
+ )
+ File(params.yarn_job_summary_log,
+ owner=params.yarn_user,
+ group=params.user_group
+ )
+ if not is_empty(params.node_label_enable) and params.node_label_enable or is_empty(params.node_label_enable) and params.node_labels_dir:
+ params.HdfsResource(params.node_labels_dir,
+ type="directory",
+ action="create_on_execute",
+ change_permissions_for_parents=True,
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0700
+ )
+ params.HdfsResource(None, action="execute")
+
+def setup_ats():
+ import params
+
+ Directory(params.ats_leveldb_dir,
+ owner=params.yarn_user,
+ group=params.user_group,
+ create_parents = True,
+ cd_access="a",
+ )
+
+ # if stack support application timeline-service state store property (timeline_state_store stack feature)
+ if params.stack_supports_timeline_state_store:
+ Directory(params.ats_leveldb_state_store_dir,
+ owner=params.yarn_user,
+ group=params.user_group,
+ create_parents = True,
+ cd_access="a",
+ )
+ # app timeline server 1.5 directories
+ if not is_empty(params.entity_groupfs_store_dir):
+ parent_path = os.path.dirname(params.entity_groupfs_store_dir)
+ params.HdfsResource(parent_path,
+ type="directory",
+ action="create_on_execute",
+ change_permissions_for_parents=True,
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0755
+ )
+ params.HdfsResource(params.entity_groupfs_store_dir,
+ type="directory",
+ action="create_on_execute",
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=params.entity_groupfs_store_dir_mode
+ )
+ if not is_empty(params.entity_groupfs_active_dir):
+ parent_path = os.path.dirname(params.entity_groupfs_active_dir)
+ params.HdfsResource(parent_path,
+ type="directory",
+ action="create_on_execute",
+ change_permissions_for_parents=True,
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0755
+ )
+ params.HdfsResource(params.entity_groupfs_active_dir,
+ type="directory",
+ action="create_on_execute",
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=params.entity_groupfs_active_dir_mode
+ )
+ params.HdfsResource(None, action="execute")
+
+def create_log_dir(dir_name):
+ import params
+ Directory(dir_name,
+ create_parents = True,
+ cd_access="a",
+ mode=0775,
+ owner=params.yarn_user,
+ group=params.user_group,
+ ignore_failures=True,
+ )
+
+def create_local_dir(dir_name):
+ import params
+ Directory(dir_name,
+ create_parents = True,
+ cd_access="a",
+ mode=0755,
+ owner=params.yarn_user,
+ group=params.user_group,
+ ignore_failures=True,
+ recursive_mode_flags = {'f': 'a+rw', 'd': 'a+rwx'},
+ )
+
+@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
+def yarn(name = None):
+ import params
+ XmlConfig("mapred-site.xml",
+ conf_dir=params.config_dir,
+ configurations=params.config['configurations']['mapred-site'],
+ owner=params.yarn_user,
+ mode='f'
+ )
+ XmlConfig("yarn-site.xml",
+ conf_dir=params.config_dir,
+ configurations=params.config['configurations']['yarn-site'],
+ owner=params.yarn_user,
+ mode='f',
+ configuration_attributes=params.config['configuration_attributes']['yarn-site']
+ )
+ XmlConfig("capacity-scheduler.xml",
+ conf_dir=params.config_dir,
+ configurations=params.config['configurations']['capacity-scheduler'],
+ owner=params.yarn_user,
+ mode='f'
+ )
+
+ if params.service_map.has_key(name):
+ service_name = params.service_map[name]
+
+ ServiceConfig(service_name,
+ action="change_user",
+ username = params.yarn_user,
+ password = Script.get_password(params.yarn_user))
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb1ceef/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 643f946..119dcf0 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -464,7 +464,6 @@ class TestHistoryServer(RMFTestCase):
)
def assert_configure_secured(self):
-
self.assertResourceCalled('HdfsResource', '/app-logs',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = True,
http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb1ceef/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index 7b5ce18..c98a64d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -178,7 +178,19 @@ class TestResourceManager(RMFTestCase):
self.assertNoMoreResources()
def assert_configure_default(self):
-
+ self.assertResourceCalled('Directory', '/etc/hadoop/conf',
+ mode = 0755,
+ create_parents = True,
+ cd_access = 'a',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
+ owner = 'yarn',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+ owner = 'yarn',
+ group = 'hadoop',
+ )
self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
owner = 'yarn',
group = 'hadoop',
@@ -268,19 +280,6 @@ class TestResourceManager(RMFTestCase):
configurations = self.getConfig()['configurations']['capacity-scheduler'],
configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
)
- self.assertResourceCalled('Directory', '/etc/hadoop/conf',
- mode = 0755,
- create_parents = True,
- cd_access = 'a',
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
- owner = 'yarn',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
- owner = 'yarn',
- group = 'hadoop',
- )
self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
content = Template('yarn.conf.j2'),
mode = 0644,
@@ -347,7 +346,19 @@ class TestResourceManager(RMFTestCase):
)
def assert_configure_secured(self):
-
+ self.assertResourceCalled('Directory', '/etc/hadoop/conf',
+ mode = 0755,
+ create_parents = True,
+ cd_access = 'a',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
+ owner = 'yarn',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
+ owner = 'yarn',
+ group = 'hadoop',
+ )
self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
owner = 'yarn',
group = 'hadoop',
@@ -437,19 +448,6 @@ class TestResourceManager(RMFTestCase):
configurations = self.getConfig()['configurations']['capacity-scheduler'],
configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
)
- self.assertResourceCalled('Directory', '/etc/hadoop/conf',
- mode = 0755,
- create_parents = True,
- cd_access = 'a',
- )
- self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
- owner = 'yarn',
- group = 'hadoop',
- )
- self.assertResourceCalled('File', '/var/log/hadoop-yarn/yarn/hadoop-mapreduce.jobsummary.log',
- owner = 'yarn',
- group = 'hadoop',
- )
self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
content = Template('yarn.conf.j2'),
mode = 0644,
http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb1ceef/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
index dc11ba9..40db813 100644
--- a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
+++ b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
@@ -92,6 +92,26 @@ class TestAppTimelineServer(RMFTestCase):
self.assertNoMoreResources()
def assert_configure_default(self):
+ self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/timeline',
+ owner = 'yarn',
+ group = 'hadoop',
+ create_parents = True,
+ cd_access='a'
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
+ hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+ kinit_path_local = '/usr/bin/kinit',
+ principal_name = UnknownConfigurationMock(),
+ user = 'hdfs',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
owner = 'yarn',
group = 'hadoop',
@@ -181,26 +201,6 @@ class TestAppTimelineServer(RMFTestCase):
configurations = self.getConfig()['configurations']['capacity-scheduler'],
configuration_attributes = self.getConfig()['configuration_attributes']['capacity-scheduler']
)
- self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/timeline',
- owner = 'yarn',
- group = 'hadoop',
- create_parents = True,
- cd_access='a'
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- hadoop_bin_dir = '/usr/bin',
- keytab = UnknownConfigurationMock(),
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- dfs_type = '',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
content = Template('yarn.conf.j2'),
mode = 0644,
http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb1ceef/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py b/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
index abfe756..b523412 100644
--- a/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
+++ b/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
@@ -50,6 +50,100 @@ class TestAts(RMFTestCase):
self.assertNoMoreResources()
def assert_configure_default(self):
+ self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/timeline',
+ owner = 'yarn',
+ group = 'hadoop',
+ create_parents = True,
+ cd_access = 'a',
+ )
+ self.assertResourceCalled('HdfsResource', '/ats',
+ immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
+ hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+ kinit_path_local = '/usr/bin/kinit',
+ principal_name = UnknownConfigurationMock(),
+ user = 'hdfs',
+ change_permissions_for_parents = True,
+ owner = 'yarn',
+ group = 'hadoop',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+ mode = 0755,
+ )
+ self.assertResourceCalled('HdfsResource', '/ats/done',
+ immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
+ hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+ kinit_path_local = '/usr/bin/kinit',
+ principal_name = UnknownConfigurationMock(),
+ user = 'hdfs',
+ owner = 'yarn',
+ group = 'hadoop',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+ mode = 0700,
+ )
+ self.assertResourceCalled('HdfsResource', '/ats',
+ immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
+ hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+ kinit_path_local = '/usr/bin/kinit',
+ principal_name = UnknownConfigurationMock(),
+ user = 'hdfs',
+ change_permissions_for_parents = True,
+ owner = 'yarn',
+ group = 'hadoop',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+ mode = 0755,
+ )
+ self.assertResourceCalled('HdfsResource', '/ats/active',
+ immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ dfs_type = '',
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+ kinit_path_local = '/usr/bin/kinit',
+ principal_name = UnknownConfigurationMock(),
+ user = 'hdfs',
+ owner = 'yarn',
+ group = 'hadoop',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ type = 'directory',
+ action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+ mode = 01777,
+ )
+ self.assertResourceCalled('HdfsResource', None,
+ immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
+ security_enabled = False,
+ hadoop_bin_dir = '/usr/bin',
+ keytab = UnknownConfigurationMock(),
+ default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+ dfs_type = '',
+ hdfs_site = self.getConfig()['configurations']['hdfs-site'],
+ kinit_path_local = '/usr/bin/kinit',
+ principal_name = UnknownConfigurationMock(),
+ user = 'hdfs',
+ action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+ hadoop_conf_dir = '/etc/hadoop/conf',
+ )
self.assertResourceCalled('Directory', '/var/run/hadoop-yarn',
owner = 'yarn',
group = 'hadoop',
@@ -146,100 +240,6 @@ class TestAts(RMFTestCase):
owner = 'yarn',
configurations = self.getConfig()['configurations']['capacity-scheduler'],
)
- self.assertResourceCalled('Directory', '/var/log/hadoop-yarn/timeline',
- owner = 'yarn',
- group = 'hadoop',
- create_parents = True,
- cd_access = 'a',
- )
- self.assertResourceCalled('HdfsResource', '/ats',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- hadoop_bin_dir = '/usr/bin',
- keytab = UnknownConfigurationMock(),
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- dfs_type = '',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- change_permissions_for_parents = True,
- owner = 'yarn',
- group = 'hadoop',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0755,
- )
- self.assertResourceCalled('HdfsResource', '/ats/done',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- hadoop_bin_dir = '/usr/bin',
- keytab = UnknownConfigurationMock(),
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- dfs_type = '',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- owner = 'yarn',
- group = 'hadoop',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0700,
- )
- self.assertResourceCalled('HdfsResource', '/ats',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- hadoop_bin_dir = '/usr/bin',
- keytab = UnknownConfigurationMock(),
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- dfs_type = '',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- change_permissions_for_parents = True,
- owner = 'yarn',
- group = 'hadoop',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 0755,
- )
- self.assertResourceCalled('HdfsResource', '/ats/active',
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- hadoop_bin_dir = '/usr/bin',
- keytab = UnknownConfigurationMock(),
- dfs_type = '',
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- owner = 'yarn',
- group = 'hadoop',
- hadoop_conf_dir = '/etc/hadoop/conf',
- type = 'directory',
- action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- mode = 01777,
- )
- self.assertResourceCalled('HdfsResource', None,
- immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
- security_enabled = False,
- hadoop_bin_dir = '/usr/bin',
- keytab = UnknownConfigurationMock(),
- default_fs = 'hdfs://c6401.ambari.apache.org:8020',
- dfs_type = '',
- hdfs_site = self.getConfig()['configurations']['hdfs-site'],
- kinit_path_local = '/usr/bin/kinit',
- principal_name = UnknownConfigurationMock(),
- user = 'hdfs',
- action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
- hadoop_conf_dir = '/etc/hadoop/conf',
- )
self.assertResourceCalled('File', '/etc/security/limits.d/yarn.conf',
content = Template('yarn.conf.j2'),
mode = 0644,
[17/19] ambari git commit: AMBARI-18472 Can't switch to not current
version for not default config group. (ababiichuk)
Posted by nc...@apache.org.
AMBARI-18472 Can't switch to not current version for not default config group. (ababiichuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aad2133f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aad2133f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aad2133f
Branch: refs/heads/branch-dev-patch-upgrade
Commit: aad2133f857e6067f37a01cce8870f27a72d933b
Parents: e44b880
Author: ababiichuk <ab...@hortonworks.com>
Authored: Wed Sep 28 10:50:10 2016 +0300
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Wed Sep 28 15:30:43 2016 +0300
----------------------------------------------------------------------
.../app/mixins/common/configs/configs_loader.js | 2 +-
.../views/common/configs/config_history_flow.js | 46 ++++++++++----------
.../common/configs/configs_loader_test.js | 10 ++---
3 files changed, 29 insertions(+), 29 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/aad2133f/ambari-web/app/mixins/common/configs/configs_loader.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/configs/configs_loader.js b/ambari-web/app/mixins/common/configs/configs_loader.js
index 87dec7f..666cef8 100644
--- a/ambari-web/app/mixins/common/configs/configs_loader.js
+++ b/ambari-web/app/mixins/common/configs/configs_loader.js
@@ -71,6 +71,7 @@ App.ConfigsLoader = Em.Mixin.create(App.GroupsMappingMixin, {
if (!this.get('preSelectedConfigVersion')) {
this.set('selectedVersion', this.get('currentDefaultVersion'));
}
+ this.set('preSelectedConfigVersion', null);
},
/**
@@ -92,7 +93,6 @@ App.ConfigsLoader = Em.Mixin.create(App.GroupsMappingMixin, {
});
self.set('selectedConfigGroup', selectedGroup);
self.loadSelectedVersion(preSelectedVersion.get('version'), selectedGroup);
- self.set('preSelectedConfigVersion', null);
preSelectedVersion = null;
});
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/aad2133f/ambari-web/app/views/common/configs/config_history_flow.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/configs/config_history_flow.js b/ambari-web/app/views/common/configs/config_history_flow.js
index 07f111a..5ad65ac 100644
--- a/ambari-web/app/views/common/configs/config_history_flow.js
+++ b/ambari-web/app/views/common/configs/config_history_flow.js
@@ -173,6 +173,7 @@ App.ConfigHistoryFlowView = Em.View.extend({
placement: 'top'
});
this.$(".version-info-bar-wrapper").stick_in_parent({parent: '#serviceConfig', offset_top: 10});
+ this.onChangeConfigGroup();
},
willDestroyElement: function() {
@@ -211,38 +212,37 @@ App.ConfigHistoryFlowView = Em.View.extend({
onChangeConfigGroup: function () {
var serviceVersions = this.get('serviceVersions');
var selectedGroupName = this.get('controller.selectedConfigGroup.name');
+ var preselectedVersion = this.get('controller.selectedVersion');
var startIndex = 0;
var currentIndex = 0;
+ var isCurrentInDefaultGroupIndex = null;
+
serviceVersions.setEach('isDisplayed', false);
- //display the version belongs to current group
- if (this.get('controller.selectedConfigGroup.isDefault')) {
- // display current in default group
- serviceVersions.forEach(function (serviceVersion, index) {
- // find current in default group
- if (serviceVersion.get('isCurrent') && serviceVersion.get('groupName') === App.ServiceConfigGroup.defaultGroupName) {
- serviceVersion.set('isDisplayed', true);
- currentIndex = index + 1;
- }
- });
- } else {
- // display current in selected group
+ // display selected version from config history
+ serviceVersions.forEach(function (serviceVersion, index) {
+ // find selected version in group
+ if (serviceVersion.get('version') === preselectedVersion && serviceVersion.get('groupName') === selectedGroupName) {
+ serviceVersion.set('isDisplayed', true);
+ currentIndex = index + 1;
+ }
+ });
+ // display current in selected group
+ if (!currentIndex) {
serviceVersions.forEach(function (serviceVersion, index) {
// find current in selected group
if (serviceVersion.get('isCurrent') && serviceVersion.get('groupName') === selectedGroupName) {
serviceVersion.set('isDisplayed', true);
currentIndex = index + 1;
}
+ if (serviceVersion.get('isCurrent') && serviceVersion.get('groupName') === App.ServiceConfigGroup.defaultGroupName) {
+ isCurrentInDefaultGroupIndex = index;
+ }
});
- // no current version for selected group, show default group current version
- if (currentIndex == 0) {
- serviceVersions.forEach(function (serviceVersion, index) {
- // find current in default group
- if (serviceVersion.get('isCurrent') && serviceVersion.get('groupName') === App.ServiceConfigGroup.defaultGroupName) {
- currentIndex = index + 1;
- serviceVersion.set('isDisplayed', true);
- }
- });
+ // if there is no current version in selected group show current version from default group
+ if (!currentIndex) {
+ serviceVersions[isCurrentInDefaultGroupIndex].set('isDisplayed', true);
+ currentIndex = isCurrentInDefaultGroupIndex + 1;
}
}
// show current version as the last one
@@ -251,7 +251,7 @@ App.ConfigHistoryFlowView = Em.View.extend({
}
this.set('startIndex', startIndex);
this.adjustFlowView();
- }.observes('controller.selectedConfigGroup.name'),
+ }.observes('controller.selectedConfigGroup'),
/**
* define the first element in viewport
@@ -641,4 +641,4 @@ App.ConfigHistoryDropdownSubMenuView = Em.View.extend({
$("#config_version_popup").removeAttr('style');
}
})
-});
\ No newline at end of file
+});
http://git-wip-us.apache.org/repos/asf/ambari/blob/aad2133f/ambari-web/test/mixins/common/configs/configs_loader_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/common/configs/configs_loader_test.js b/ambari-web/test/mixins/common/configs/configs_loader_test.js
index e718bbe..8f22b44 100644
--- a/ambari-web/test/mixins/common/configs/configs_loader_test.js
+++ b/ambari-web/test/mixins/common/configs/configs_loader_test.js
@@ -103,6 +103,11 @@ describe('App.ConfigsLoader', function() {
mixin.loadServiceConfigVersionsSuccess({items: []});
expect(mixin.get('selectedVersion')).to.be.equal('v1');
});
+
+ it("preSelectedConfigVersion should be null", function() {
+ mixin.loadServiceConfigVersionsSuccess();
+ expect(mixin.get('preSelectedConfigVersion')).to.be.null;
+ });
});
describe("#loadPreSelectedConfigVersion()", function () {
@@ -213,11 +218,6 @@ describe('App.ConfigsLoader', function() {
mixin.loadPreSelectedConfigVersion();
expect(mixin.loadSelectedVersion.calledOnce).to.be.true;
});
-
- it("preSelectedConfigVersion should be null", function() {
- mixin.loadPreSelectedConfigVersion();
- expect(mixin.get('preSelectedConfigVersion')).to.be.null;
- });
});
describe("#loadCurrentVersions()", function () {
[14/19] ambari git commit: AMBARI-18467. Ambari server does not come
up after restart if cluster install fails. (Balazs Bence Sari via stoader)
Posted by nc...@apache.org.
AMBARI-18467. Ambari server does not come up after restart if cluster install fails. (Balazs Bence Sari via stoader)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2700bd12
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2700bd12
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2700bd12
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2700bd125f2f9bbae2ebfcc0831ec881097b4cff
Parents: a0fff84
Author: Balazs Bence Sari <bs...@hortonworks.com>
Authored: Wed Sep 28 13:42:59 2016 +0200
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Wed Sep 28 13:42:59 2016 +0200
----------------------------------------------------------------------
.../stack/UpdateActiveRepoVersionOnStartup.java | 9 +++++--
.../UpdateActiveRepoVersionOnStartupTest.java | 28 +++++++++++++-------
2 files changed, 26 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/2700bd12/ambari-server/src/main/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartup.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartup.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartup.java
index 1413c66..8a32a42 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartup.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartup.java
@@ -94,9 +94,14 @@ public class UpdateActiveRepoVersionOnStartup {
LOG.info("Updating existing repo versions for cluster {} on stack {}-{}",
cluster.getClusterName(), stack.getName(), stack.getVersion());
ClusterVersionEntity clusterVersion = clusterVersionDao.findByClusterAndStateCurrent(cluster.getClusterName());
+ if (null != clusterVersion) {
RepositoryVersionEntity repoVersion = clusterVersion.getRepositoryVersion();
- updateRepoVersion(stack, repoVersion);
- repositoryVersionDao.merge(repoVersion);
+ updateRepoVersion(stack, repoVersion);
+ repositoryVersionDao.merge(repoVersion);
+ }
+ else {
+ LOG.warn("Missing cluster version for cluster {}", cluster.getClusterName());
+ }
}
}
catch(Exception ex) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/2700bd12/ambari-server/src/test/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartupTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartupTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartupTest.java
index 9c54a88..24ab0e8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartupTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartupTest.java
@@ -37,7 +37,6 @@ import org.apache.ambari.server.state.RepositoryInfo;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.stack.upgrade.RepositoryVersionHelper;
import org.junit.Assert;
-import org.junit.Before;
import org.junit.Test;
import com.google.common.base.Charsets;
@@ -61,10 +60,17 @@ public class UpdateActiveRepoVersionOnStartupTest {
@Test
public void addAServiceRepoToExistingRepoVersion() throws Exception {
+ init(true);
activeRepoUpdater.process();
verifyRepoIsAdded();
}
+ @Test
+ public void missingClusterVersionShouldNotCauseException() throws Exception {
+ init(false);
+ activeRepoUpdater.process();
+ }
+
/**
* Verifies if the add-on service repo is added to the repo version entity, both json and xml representations.
*
@@ -84,8 +90,7 @@ public class UpdateActiveRepoVersionOnStartupTest {
Assert.assertTrue(ADD_ON_REPO_ID + " is add-on repo was not added to JSON representation", serviceRepoAddedToJson);
}
- @Before
- public void init() throws Exception {
+ public void init(boolean addClusterVersion) throws Exception {
ClusterDAO clusterDao = mock(ClusterDAO.class);
ClusterVersionDAO clusterVersionDAO = mock(ClusterVersionDAO.class);
repositoryVersionDao = mock(RepositoryVersionDAO.class);
@@ -125,17 +130,22 @@ public class UpdateActiveRepoVersionOnStartupTest {
}
};
Injector injector = Guice.createInjector(testModule);
- repoVersion = new RepositoryVersionEntity();
- repoVersion.setStack(stackEntity);
- repoVersion.setOperatingSystems(resourceAsString("org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartupTest_initialRepos.json"));
- ClusterVersionEntity clusterVersion = new ClusterVersionEntity();
- clusterVersion.setRepositoryVersion(repoVersion);
- when(clusterVersionDAO.findByClusterAndStateCurrent(CLUSTER_NAME)).thenReturn(clusterVersion);
+ if (addClusterVersion) {
+ repoVersion = new RepositoryVersionEntity();
+ repoVersion.setStack(stackEntity);
+ repoVersion.setOperatingSystems(resourceAsString("org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartupTest_initialRepos.json"));
+ ClusterVersionEntity clusterVersion = new ClusterVersionEntity();
+ clusterVersion.setRepositoryVersion(repoVersion);
+ when(clusterVersionDAO.findByClusterAndStateCurrent(CLUSTER_NAME)).thenReturn(clusterVersion);
+
+ }
activeRepoUpdater = new UpdateActiveRepoVersionOnStartup(clusterDao,
clusterVersionDAO, repositoryVersionDao, repositoryVersionHelper, metaInfo);
}
+
+
private static String resourceAsString(String resourceName) throws IOException {
return Resources.toString(Resources.getResource(resourceName), Charsets.UTF_8);
}
[03/19] ambari git commit: AMBARI-18449 Ambari create widget does not
show what the error is (Vivek Ratnavel Subramanian via zhewang)
Posted by nc...@apache.org.
AMBARI-18449 Ambari create widget does not show what the error is (Vivek Ratnavel Subramanian via zhewang)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/69e8f6fa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/69e8f6fa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/69e8f6fa
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 69e8f6fa89b5b2a4e6246e0e32b23c314a12df5c
Parents: ee4e63a
Author: Zhe (Joe) Wang <zh...@apache.org>
Authored: Mon Sep 26 13:12:09 2016 -0700
Committer: Zhe (Joe) Wang <zh...@apache.org>
Committed: Mon Sep 26 13:12:09 2016 -0700
----------------------------------------------------------------------
.../service/widgets/create/step2_controller.js | 26 ++++++++++++++++++++
.../main/service/widgets/create/expression.hbs | 10 ++++++--
.../main/service/widgets/create/step2_graph.hbs | 6 ++++-
.../service/widgets/create/step2_number.hbs | 6 ++++-
.../service/widgets/create/step2_template.hbs | 6 ++++-
.../service/widgets/create/expression_view.js | 14 +++++++++--
6 files changed, 61 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/69e8f6fa/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js b/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js
index 4e3ab91..8b1045a 100644
--- a/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js
+++ b/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js
@@ -138,6 +138,32 @@ App.WidgetWizardStep2Controller = Em.Controller.extend({
},
/**
+ * check whether any of the expressions is incomplete or invalid
+ * @returns {boolean}
+ */
+ isAnyExpressionInvalid: function() {
+ var isAnyExpressionInvalid = false;
+ switch (this.get('content.widgetType')) {
+ case "NUMBER":
+ case "GAUGE":
+ case "TEMPLATE":
+ isAnyExpressionInvalid = this.get('isSubmitDisabled') && this.get('expressions').someProperty('isEmpty', false);
+ break;
+ case "GRAPH":
+ var dataSets = this.get('dataSets'),
+ isNotEmpty = false;
+ for (var i = 0; i < dataSets.length; i++) {
+ if (dataSets[i].get('expression.data').length > 0) {
+ isNotEmpty = true;
+ break;
+ }
+ }
+ isAnyExpressionInvalid = this.get('isSubmitDisabled') && isNotEmpty;
+ }
+ return isAnyExpressionInvalid;
+ }.property('isSubmitDisabled'),
+
+ /**
* check whether data of graph widget is complete
* @param dataSets
* @returns {boolean} isComplete
http://git-wip-us.apache.org/repos/asf/ambari/blob/69e8f6fa/ambari-web/app/templates/main/service/widgets/create/expression.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/widgets/create/expression.hbs b/ambari-web/app/templates/main/service/widgets/create/expression.hbs
index 826cb43..0ad08bf 100644
--- a/ambari-web/app/templates/main/service/widgets/create/expression.hbs
+++ b/ambari-web/app/templates/main/service/widgets/create/expression.hbs
@@ -33,8 +33,15 @@
{{view App.AddNumberExpressionView valueBinding="view.numberValue" class="input-small"}}
<button class="btn add-on" {{action addNumber target="view"}} {{bindAttr disabled="view.isNumberValueInvalid"}}>{{t dashboard.widgets.wizard.step2.newNumber}}</button>
</div>
-
</div>
+{{#if view.isInvalid}}
+ <div class="alert alert-error">
+ Invalid expression!
+ {{#if view.isInvalidTextfield}}
+ Only numbers or operators are allowed in this field.
+ {{/if}}
+ </div>
+{{/if}}
<div class="metric-field">
{{#if view.expression.isRemovable}}
<a {{action removeExpression view.expression target="controller"}} class="remove-link"><i class="icon-trash"></i></a>
@@ -54,6 +61,5 @@
</div>
<div class="placeholder">{{t dashboard.widgets.wizard.step2.addMetrics}}</div>
{{/if}}
-
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/69e8f6fa/ambari-web/app/templates/main/service/widgets/create/step2_graph.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/widgets/create/step2_graph.hbs b/ambari-web/app/templates/main/service/widgets/create/step2_graph.hbs
index 17e4790..483abd6 100644
--- a/ambari-web/app/templates/main/service/widgets/create/step2_graph.hbs
+++ b/ambari-web/app/templates/main/service/widgets/create/step2_graph.hbs
@@ -20,7 +20,11 @@
<div class="alert alert-info">
{{t widget.create.wizard.step2.body.text}}
</div>
-
+{{#if isAnyExpressionInvalid}}
+ <div class="alert alert-error">
+ Expression is not complete or is invalid!
+ </div>
+{{/if}}
{{#each dataSet in dataSets}}
<fieldset>
<h5>{{view Ember.TextField valueBinding="dataSet.label"}}</h5>
http://git-wip-us.apache.org/repos/asf/ambari/blob/69e8f6fa/ambari-web/app/templates/main/service/widgets/create/step2_number.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/widgets/create/step2_number.hbs b/ambari-web/app/templates/main/service/widgets/create/step2_number.hbs
index 27fd7e4..13c4f4b 100644
--- a/ambari-web/app/templates/main/service/widgets/create/step2_number.hbs
+++ b/ambari-web/app/templates/main/service/widgets/create/step2_number.hbs
@@ -20,7 +20,11 @@
<div class="alert alert-info">
{{t widget.create.wizard.step2.body.text}}
</div>
-
+{{#if isAnyExpressionInvalid}}
+ <div class="alert alert-error">
+ Expression is not complete or is invalid!
+ </div>
+{{/if}}
{{#each expression in expressions}}
{{view App.WidgetWizardExpressionView expressionBinding="expression"}}
{{/each}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/69e8f6fa/ambari-web/app/templates/main/service/widgets/create/step2_template.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/widgets/create/step2_template.hbs b/ambari-web/app/templates/main/service/widgets/create/step2_template.hbs
index 6a8a469..c0faedc 100644
--- a/ambari-web/app/templates/main/service/widgets/create/step2_template.hbs
+++ b/ambari-web/app/templates/main/service/widgets/create/step2_template.hbs
@@ -33,7 +33,11 @@
<div class="alert alert-info">
{{t widget.create.wizard.step2.body.text}}
</div>
-
+{{#if isAnyExpressionInvalid}}
+ <div class="alert alert-error">
+ Expression is not complete or is invalid!
+ </div>
+{{/if}}
{{#each expression in expressions}}
<h5>{{EXPRESSION_PREFIX}}{{expression.id}}</h5>
{{view App.WidgetWizardExpressionView expressionBinding="expression"}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/69e8f6fa/ambari-web/app/views/main/service/widgets/create/expression_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/widgets/create/expression_view.js b/ambari-web/app/views/main/service/widgets/create/expression_view.js
index 32c664f..a12bf99 100644
--- a/ambari-web/app/views/main/service/widgets/create/expression_view.js
+++ b/ambari-web/app/views/main/service/widgets/create/expression_view.js
@@ -67,6 +67,11 @@ App.WidgetWizardExpressionView = Em.View.extend({
isInvalid: false,
/**
+ * @type {boolean}
+ */
+ isInvalidTextfield: false,
+
+ /**
* contains value of number added to expression
* @type {string}
*/
@@ -431,7 +436,8 @@ App.InputCursorTextfieldView = Ember.TextField.extend({
validateInput: function () {
var value = this.get('value');
var parentView = this.get('parentView');
- var isInvalid = false;
+ var isInvalid = false,
+ isInvalidTextfield = false;
if (!number_utils.isPositiveNumber(value)) {
if (value && parentView.get('OPERATORS').contains(value)) {
@@ -451,10 +457,12 @@ App.InputCursorTextfieldView = Ember.TextField.extend({
this.set('value', '');
} else if (value) {
// invalid operator
- isInvalid = true;
+ isInvalid = isInvalidTextfield = true;
}
}
this.set('isInvalid', isInvalid);
+ this.set('parentView.isInvalid', isInvalid);
+ this.set('parentView.isInvalidTextfield', isInvalidTextfield);
}.observes('value'),
keyDown: function (event) {
@@ -479,6 +487,8 @@ App.InputCursorTextfieldView = Ember.TextField.extend({
}));
this.set('numberValue', "");
this.set('isInvalid', false);
+ this.set('parentView.isInvalid', false);
+ this.set('parentView.isInvalidTextfield', false);
this.set('value', '');
}
}
[10/19] ambari git commit: AMBARI-18474. Kerberos wizard loses
request id on server restart (alexantonenko)
Posted by nc...@apache.org.
AMBARI-18474. Kerberos wizard loses request id on server restart (alexantonenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c265ae62
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c265ae62
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c265ae62
Branch: refs/heads/branch-dev-patch-upgrade
Commit: c265ae6228342146dc99e9b5681d8a2645a220fa
Parents: 7b92434
Author: Alex Antonenko <hi...@gmail.com>
Authored: Tue Sep 27 18:32:12 2016 +0300
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Tue Sep 27 21:52:49 2016 +0300
----------------------------------------------------------------------
ambari-web/app/models/cluster_states.js | 4 +++-
ambari-web/app/routes/add_kerberos_routes.js | 4 +++-
2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c265ae62/ambari-web/app/models/cluster_states.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/cluster_states.js b/ambari-web/app/models/cluster_states.js
index 7e80c32..cbc134d 100644
--- a/ambari-web/app/models/cluster_states.js
+++ b/ambari-web/app/models/cluster_states.js
@@ -146,7 +146,9 @@ App.clusterStatus = Em.Object.create(App.UserPref, {
this.set('localdb', response.localdb);
// restore HAWizard data if process was started
var isHAWizardStarted = App.isAuthorized('SERVICE.ENABLE_HA') && !App.isEmptyObject(response.localdb.HighAvailabilityWizard);
- if (params.data.overrideLocaldb || isHAWizardStarted) {
+ // restore Kerberos Wizard data if process was started
+ var isKerberosWizardStarted = App.isAuthorized('CLUSTER.TOGGLE_KERBEROS') && !App.isEmptyObject(response.localdb.KerberosWizard);
+ if (params.data.overrideLocaldb || isHAWizardStarted || isKerberosWizardStarted) {
var localdbTables = (App.db.data.app && App.db.data.app.tables) ? App.db.data.app.tables : {};
var authenticated = Em.get(App, 'db.data.app.authenticated') || false;
App.db.data = response.localdb;
http://git-wip-us.apache.org/repos/asf/ambari/blob/c265ae62/ambari-web/app/routes/add_kerberos_routes.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/add_kerberos_routes.js b/ambari-web/app/routes/add_kerberos_routes.js
index 4291394..1568577 100644
--- a/ambari-web/app/routes/add_kerberos_routes.js
+++ b/ambari-web/app/routes/add_kerberos_routes.js
@@ -20,7 +20,9 @@ var App = require('app');
module.exports = App.WizardRoute.extend({
route: '/enable',
enter: function (router) {
- router.get('mainController').dataLoading().done(function () {
+ router.get('mainController').dataLoading().done(function() {
+ return App.clusterStatus.updateFromServer();
+ }).done(function () {
var kerberosWizardController = router.get('kerberosWizardController');
App.router.get('updateController').set('isWorking', false);
var popup = App.ModalPopup.show({
[16/19] ambari git commit: AMBARI-18478. Ambari UI - Service Actions
menu for pluralized value has grammatical error (onechiporenko)
Posted by nc...@apache.org.
AMBARI-18478. Ambari UI - Service Actions menu for pluralized value has grammatical error (onechiporenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e44b8805
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e44b8805
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e44b8805
Branch: refs/heads/branch-dev-patch-upgrade
Commit: e44b880514109011fadb9c274b3b8163f13390d8
Parents: 39858cc
Author: Oleg Nechiporenko <on...@apache.org>
Authored: Wed Sep 28 11:41:20 2016 +0300
Committer: Oleg Nechiporenko <on...@apache.org>
Committed: Wed Sep 28 15:29:33 2016 +0300
----------------------------------------------------------------------
ambari-web/app/assets/licenses/NOTICE.txt | 3 +
ambari-web/app/messages.js | 13 +-
ambari-web/app/utils/string_utils.js | 7 +-
.../app/views/common/rolling_restart_view.js | 19 +-
ambari-web/app/views/main/service/item.js | 3 +-
.../service/widgets/create/expression_view.js | 2 +-
ambari-web/brunch-config.js | 3 +-
.../resourceManager/wizard_controller_test.js | 1 -
ambari-web/test/models/cluster_test.js | 12 +-
.../objects/service_config_property_test.js | 31 +-
.../configs/theme/sub_section_tab_test.js | 2 +-
.../test/views/main/host/log_metrics_test.js | 1 -
ambari-web/test/views/main/host_test.js | 4 +-
ambari-web/vendor/scripts/pluralize.js | 461 +++++++++++++++++++
14 files changed, 506 insertions(+), 56 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/app/assets/licenses/NOTICE.txt
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/licenses/NOTICE.txt b/ambari-web/app/assets/licenses/NOTICE.txt
index c750a37..75a13ea 100644
--- a/ambari-web/app/assets/licenses/NOTICE.txt
+++ b/ambari-web/app/assets/licenses/NOTICE.txt
@@ -60,3 +60,6 @@ Copyright (C) 2015 Leaf Corcoran (leafot [at] gmail [*dot*] com)
This product includes bootstrap-contextmenu v.0.3.3 (https://github.com/sydcanem/bootstrap-contextmenu - MIT License)
Copyright (C) 2015 James Santos
+
+This product includes pluralize v.3.0.0 (https://github.com/blakeembrey/pluralize - MIT License)
+Copyright (C) 2016 Blake Embrey
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 2c819e5..1c53839 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -2913,14 +2913,13 @@ Em.I18n.translations = {
'tableView.filters.filteredAlertInstancesInfo': '{0} of {1} instances showing',
'tableView.filters.filteredLogsInfo': '{0} of {1} logs showing',
- 'rollingrestart.dialog.title': 'Restart {0}s',
+ 'rollingrestart.dialog.title': 'Restart {0}',
'rollingrestart.dialog.primary': 'Trigger Rolling Restart',
'rollingrestart.notsupported.hostComponent': 'Rolling restart not supported for {0} components',
- 'rollingrestart.dialog.msg.restart': 'This will restart a specified number of {0}s at a time.',
- 'rollingrestart.dialog.msg.noRestartHosts': 'There are no {0}s to do rolling restarts',
+ 'rollingrestart.dialog.msg.restart': 'This will restart a specified number of {0} at a time.',
+ 'rollingrestart.dialog.msg.noRestartHosts': 'There are no {0} to do rolling restarts',
'rollingrestart.dialog.msg.maintainance': 'Note: {0} {1} in Maintenance Mode will not be restarted',
- 'rollingrestart.dialog.msg.maintainance.plural': 'Note: {0} {1}s in Maintenance Mode will not be restarted',
- 'rollingrestart.dialog.msg.componentsAtATime': '{0}s at a time',
+ 'rollingrestart.dialog.msg.componentsAtATime': '{0} at a time',
'rollingrestart.dialog.msg.timegap.prefix': 'Wait ',
'rollingrestart.dialog.msg.timegap.suffix': 'seconds between batches ',
'rollingrestart.dialog.msg.toleration.prefix': 'Tolerate up to ',
@@ -2930,7 +2929,7 @@ Em.I18n.translations = {
'rollingrestart.dialog.err.invalid.toleratesize': 'Invalid failure toleration count: {0}',
'rollingrestart.dialog.warn.datanode.batch.size': 'Restarting more than one DataNode at a time is not recommended. Doing so can lead to data unavailability and/or possible loss of data being actively written to HDFS.',
'rollingrestart.dialog.msg.serviceNotInMM':'Note: This will trigger alerts. To suppress alerts, turn on Maintenance Mode for {0} prior to triggering a rolling restart',
- 'rollingrestart.dialog.msg.staleConfigsOnly': 'Only restart {0}s with stale configs',
+ 'rollingrestart.dialog.msg.staleConfigsOnly': 'Only restart {0} with stale configs',
'rollingrestart.rest.context': 'Rolling Restart of {0}s - batch {1} of {2}',
'rollingrestart.context.allOnSelectedHosts':'Restart all components on the selected hosts',
'rollingrestart.context.allForSelectedService':'Restart all components for {0}',
@@ -2962,7 +2961,7 @@ Em.I18n.translations = {
'widget.create.wizard.step2.addExpression': 'Add Expression',
'widget.create.wizard.step2.addDataset': 'Add data set',
'widget.create.wizard.step2.body.gauge.overflow.warning':'Overflowed! Gauge can only display number between 0 and 1.',
- 'widget.create.wizard.step2.allComponents': 'All {0}s',
+ 'widget.create.wizard.step2.allComponents': 'All {0}',
'widget.create.wizard.step2.activeComponents': 'Active {0}',
'widget.create.wizard.step2.noMetricFound': 'No metric found',
'widget.create.wizard.step3.widgetName': 'Name',
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/app/utils/string_utils.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/string_utils.js b/ambari-web/app/utils/string_utils.js
index 3754ba1..f4e3674 100644
--- a/ambari-web/app/utils/string_utils.js
+++ b/ambari-web/app/utils/string_utils.js
@@ -201,11 +201,8 @@ module.exports = {
* @method pluralize
*/
pluralize: function(count, singular, plural) {
- plural = plural || singular + 's';
- if (count > 1) {
- return plural;
- }
- return singular;
+ var _plural = plural || pluralize(singular);
+ return count > 1 ? _plural : singular;
},
/**
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/app/views/common/rolling_restart_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/rolling_restart_view.js b/ambari-web/app/views/common/rolling_restart_view.js
index 0d849a2..64b8610 100644
--- a/ambari-web/app/views/common/rolling_restart_view.js
+++ b/ambari-web/app/views/common/rolling_restart_view.js
@@ -119,7 +119,7 @@ App.RollingRestartView = Em.View.extend({
* List of errors is saved to <code>errors</code>
*/
validate : function() {
- var displayName = this.get('hostComponentDisplayName');
+ var displayName = pluralize(this.get('hostComponentDisplayName'));
var componentName = this.get('hostComponentName');
var totalCount = this.get('restartHostComponents.length');
var bs = this.get('batchSize');
@@ -207,7 +207,9 @@ App.RollingRestartView = Em.View.extend({
/**
* @type {String}
*/
- restartMessage: Em.computed.i18nFormat('rollingrestart.dialog.msg.restart', 'hostComponentDisplayName'),
+ restartMessage : function() {
+ return Em.I18n.t('rollingrestart.dialog.msg.restart').format(pluralize(this.get('hostComponentDisplayName')));
+ }.property('hostComponentDisplayName'),
/**
* @type {String}
@@ -216,10 +218,7 @@ App.RollingRestartView = Em.View.extend({
var count = this.get('componentsWithMaintenanceHost.length');
if (count > 0) {
var name = this.get('hostComponentDisplayName');
- if (count > 1) {
- return Em.I18n.t('rollingrestart.dialog.msg.maintainance.plural').format(count, name)
- }
- return Em.I18n.t('rollingrestart.dialog.msg.maintainance').format(count, name)
+ return Em.I18n.t('rollingrestart.dialog.msg.maintainance').format(count, pluralize(name));
}
return null;
}.property('componentsWithMaintenanceHost', 'hostComponentDisplayName'),
@@ -227,11 +226,15 @@ App.RollingRestartView = Em.View.extend({
/**
* @type {String}
*/
- batchSizeMessage: Em.computed.i18nFormat('rollingrestart.dialog.msg.componentsAtATime', 'hostComponentDisplayName'),
+ batchSizeMessage : function() {
+ return Em.I18n.t('rollingrestart.dialog.msg.componentsAtATime').format(pluralize(this.get('hostComponentDisplayName')));
+ }.property('hostComponentDisplayName'),
/**
* @type {String}
*/
- staleConfigsOnlyMessage: Em.computed.i18nFormat('rollingrestart.dialog.msg.staleConfigsOnly', 'hostComponentDisplayName')
+ staleConfigsOnlyMessage : function() {
+ return Em.I18n.t('rollingrestart.dialog.msg.staleConfigsOnly').format(pluralize(this.get('hostComponentDisplayName')));
+ }.property('hostComponentDisplayName')
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/app/views/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/item.js b/ambari-web/app/views/main/service/item.js
index a007e17..fc9c4f3 100644
--- a/ambari-web/app/views/main/service/item.js
+++ b/ambari-web/app/views/main/service/item.js
@@ -146,9 +146,10 @@ App.MainServiceItemView = Em.View.extend({
allSlaves.concat(allMasters).filter(function (_component) {
return App.get('components.rollinRestartAllowed').contains(_component);
}).forEach(function(_component) {
+ var _componentNamePluralized = pluralize(App.format.role(_component, false));
options.push(self.createOption(actionMap.ROLLING_RESTART, {
context: _component,
- label: actionMap.ROLLING_RESTART.label.format(App.format.role(_component, false))
+ label: actionMap.ROLLING_RESTART.label.format(_componentNamePluralized)
}));
});
allMasters.filter(function(master) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/app/views/main/service/widgets/create/expression_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/widgets/create/expression_view.js b/ambari-web/app/views/main/service/widgets/create/expression_view.js
index a12bf99..7afe287 100644
--- a/ambari-web/app/views/main/service/widgets/create/expression_view.js
+++ b/ambari-web/app/views/main/service/widgets/create/expression_view.js
@@ -358,7 +358,7 @@ App.AddMetricExpressionView = Em.View.extend({
return Em.I18n.t('widget.create.wizard.step2.activeComponents').format(stackComponent.get('displayName'));
}
}
- return Em.I18n.t('widget.create.wizard.step2.allComponents').format(stackComponent.get('displayName'));
+ return Em.I18n.t('widget.create.wizard.step2.allComponents').format(pluralize(stackComponent.get('displayName')));
}.property('componentName', 'level'),
count: servicesMap[serviceName].components[componentId].count,
metrics: servicesMap[serviceName].components[componentId].metrics.uniq().sort(),
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/brunch-config.js
----------------------------------------------------------------------
diff --git a/ambari-web/brunch-config.js b/ambari-web/brunch-config.js
index 64ac946..d71f8da 100644
--- a/ambari-web/brunch-config.js
+++ b/ambari-web/brunch-config.js
@@ -74,7 +74,8 @@ module.exports.config = {
'vendor/scripts/spin.js',
'vendor/scripts/jquery.flexibleArea.js',
'vendor/scripts/FileSaver.js',
- 'vendor/scripts/Blob.js'
+ 'vendor/scripts/Blob.js',
+ 'vendor/scripts/pluralize.js'
]
}
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/test/controllers/main/admin/highAvailability/resourceManager/wizard_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/highAvailability/resourceManager/wizard_controller_test.js b/ambari-web/test/controllers/main/admin/highAvailability/resourceManager/wizard_controller_test.js
index 5a991d3..19fbea6 100644
--- a/ambari-web/test/controllers/main/admin/highAvailability/resourceManager/wizard_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/highAvailability/resourceManager/wizard_controller_test.js
@@ -18,7 +18,6 @@
var App = require('app');
require('controllers/main/admin/highAvailability/resourceManager/wizard_controller');
-var testHelpers = require('test/helpers');
describe('App.RMHighAvailabilityWizardController', function () {
var controller;
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/test/models/cluster_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/cluster_test.js b/ambari-web/test/models/cluster_test.js
index a6bafba..604e50a 100644
--- a/ambari-web/test/models/cluster_test.js
+++ b/ambari-web/test/models/cluster_test.js
@@ -29,7 +29,7 @@ describe('App.Cluster', function () {
describe('#isKerberosEnabled', function () {
- var cases = [
+ [
{
securityType: 'KERBEROS',
isKerberosEnabled: true,
@@ -40,9 +40,7 @@ describe('App.Cluster', function () {
isKerberosEnabled: false,
title: 'Kerberos disabled'
}
- ];
-
- cases.forEach(function (item) {
+ ].forEach(function (item) {
it(item.title, function () {
cluster.set('securityType', item.securityType);
@@ -53,7 +51,7 @@ describe('App.Cluster', function () {
describe('#isCredentialStorePersistent', function () {
- var cases = [
+ [
{
propertyValue: 'false',
isCredentialStorePersistent: false,
@@ -69,9 +67,7 @@ describe('App.Cluster', function () {
isCredentialStorePersistent: true,
title: 'persistent credential store'
}
- ];
-
- cases.forEach(function (item) {
+ ].forEach(function (item) {
it(item.title, function () {
cluster.set('credentialStoreProperties', {
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/test/models/configs/objects/service_config_property_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/configs/objects/service_config_property_test.js b/ambari-web/test/models/configs/objects/service_config_property_test.js
index 49613a44..ef0bd61 100644
--- a/ambari-web/test/models/configs/objects/service_config_property_test.js
+++ b/ambari-web/test/models/configs/objects/service_config_property_test.js
@@ -189,9 +189,7 @@ describe('App.ServiceConfigProperty', function () {
App.TestAliases.testAsComputedAnd(getProperty(), 'hideFinalIcon', ['!isFinal', 'isNotEditable']);
describe('#placeholder', function () {
- it('should equal foo', function() {
- serviceConfigProperty.set('isEditable', true);
- var testCases = [
+ [
{
placeholderText: 'foo',
savedValue: ''
@@ -204,26 +202,19 @@ describe('App.ServiceConfigProperty', function () {
placeholderText: 'foo',
savedValue: 'bar'
}
- ];
- testCases.forEach(function (item) {
- serviceConfigProperty.set('placeholderText', item.placeholderText);
- serviceConfigProperty.set('savedValue', item.savedValue);
- expect(serviceConfigProperty.get('placeholder')).to.equal('foo');
- });
+ ].forEach(function (item) {
+ it('should equal foo, placeholder = ' + JSON.stringify(item.placeholderText), function() {
+ serviceConfigProperty.set('isEditable', true);
+ serviceConfigProperty.set('placeholderText', item.placeholderText);
+ serviceConfigProperty.set('savedValue', item.savedValue);
+ expect(serviceConfigProperty.get('placeholder')).to.equal('foo');
+ });
});
it('should equal null', function() {
serviceConfigProperty.set('isEditable', false);
- var testCases = [
- {
- placeholderText: 'foo',
- savedValue: 'bar'
- }
- ];
- testCases.forEach(function (item) {
- serviceConfigProperty.set('placeholderText', item.placeholderText);
- serviceConfigProperty.set('savedValue', item.savedValue);
- expect(serviceConfigProperty.get('placeholder')).to.equal(null);
- });
+ serviceConfigProperty.set('placeholderText', 'foo');
+ serviceConfigProperty.set('savedValue', 'bar');
+ expect(serviceConfigProperty.get('placeholder')).to.equal(null);
});
});
describe('#isPropertyOverridable', function () {
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/test/models/configs/theme/sub_section_tab_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/configs/theme/sub_section_tab_test.js b/ambari-web/test/models/configs/theme/sub_section_tab_test.js
index 6044432..0c3b98c 100644
--- a/ambari-web/test/models/configs/theme/sub_section_tab_test.js
+++ b/ambari-web/test/models/configs/theme/sub_section_tab_test.js
@@ -155,7 +155,7 @@ describe('App.SubSectionTab', function () {
it('should include visible properties with errors', function () {
subSectionTab.set('configs', configs);
- expect(subSectionTab.get('errorsCount')).to.eql(8);
+ expect(subSectionTab.get('errorsCount')).to.be.equal(8);
});
});
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/test/views/main/host/log_metrics_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/host/log_metrics_test.js b/ambari-web/test/views/main/host/log_metrics_test.js
index a0a3c6c..52f4e55 100644
--- a/ambari-web/test/views/main/host/log_metrics_test.js
+++ b/ambari-web/test/views/main/host/log_metrics_test.js
@@ -17,7 +17,6 @@
*/
var App = require('app');
-var fileUtils = require('utils/file_utils');
describe('App.MainHostLogMetrics', function() {
var view;
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/test/views/main/host_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/host_test.js b/ambari-web/test/views/main/host_test.js
index 0b789f6..79cc65c 100644
--- a/ambari-web/test/views/main/host_test.js
+++ b/ambari-web/test/views/main/host_test.js
@@ -773,7 +773,7 @@ describe('App.MainHostView', function () {
describe("#restartRequiredComponentsMessage", function () {
it("5 components require restart", function() {
- var content = 'c1, c2, c3, c4, c5' + ' ' + Em.I18n.t('common.components').toLowerCase();
+ var content = 'c1, c2, c3, c4, c5 ' + Em.I18n.t('common.components').toLowerCase();
hostView.set('content.componentsWithStaleConfigsCount', 5);
hostView.set('content.componentsWithStaleConfigs', [
{displayName: 'c1'},
@@ -789,7 +789,7 @@ describe('App.MainHostView', function () {
});
it("1 component require restart", function() {
- var content = 'c1' + ' ' + Em.I18n.t('common.component').toLowerCase();
+ var content = 'c1 ' + Em.I18n.t('common.component').toLowerCase();
hostView.set('content.componentsWithStaleConfigsCount', 1);
hostView.set('content.componentsWithStaleConfigs', [
{displayName: 'c1'}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e44b8805/ambari-web/vendor/scripts/pluralize.js
----------------------------------------------------------------------
diff --git a/ambari-web/vendor/scripts/pluralize.js b/ambari-web/vendor/scripts/pluralize.js
new file mode 100644
index 0000000..7246db1
--- /dev/null
+++ b/ambari-web/vendor/scripts/pluralize.js
@@ -0,0 +1,461 @@
+/* global define */
+
+(function (root, pluralize) {
+ /* istanbul ignore else */
+ if (typeof require === 'function' && typeof exports === 'object' && typeof module === 'object') {
+ // Node.
+ module.exports = pluralize();
+ } else if (typeof define === 'function' && define.amd) {
+ // AMD, registers as an anonymous module.
+ define(function () {
+ return pluralize();
+ });
+ } else {
+ // Browser global.
+ root.pluralize = pluralize();
+ }
+})(this, function () {
+ // Rule storage - pluralize and singularize need to be run sequentially,
+ // while other rules can be optimized using an object for instant lookups.
+ var pluralRules = [];
+ var singularRules = [];
+ var uncountables = {};
+ var irregularPlurals = {};
+ var irregularSingles = {};
+
+ /**
+ * Title case a string.
+ *
+ * @param {string} str
+ * @return {string}
+ */
+ function toTitleCase (str) {
+ return str.charAt(0).toUpperCase() + str.substr(1).toLowerCase();
+ }
+
+ /**
+ * Sanitize a pluralization rule to a usable regular expression.
+ *
+ * @param {(RegExp|string)} rule
+ * @return {RegExp}
+ */
+ function sanitizeRule (rule) {
+ if (typeof rule === 'string') {
+ return new RegExp('^' + rule + '$', 'i');
+ }
+
+ return rule;
+ }
+
+ /**
+ * Pass in a word token to produce a function that can replicate the case on
+ * another word.
+ *
+ * @param {string} word
+ * @param {string} token
+ * @return {Function}
+ */
+ function restoreCase (word, token) {
+ // Tokens are an exact match.
+ if (word === token) {
+ return token;
+ }
+
+ // Upper cased words. E.g. "HELLO".
+ if (word === word.toUpperCase()) {
+ return token.toUpperCase();
+ }
+
+ // Title cased words. E.g. "Title".
+ if (word[0] === word[0].toUpperCase()) {
+ return toTitleCase(token);
+ }
+
+ // Lower cased words. E.g. "test".
+ return token.toLowerCase();
+ }
+
+ /**
+ * Interpolate a regexp string.
+ *
+ * @param {string} str
+ * @param {Array} args
+ * @return {string}
+ */
+ function interpolate (str, args) {
+ return str.replace(/\$(\d{1,2})/g, function (match, index) {
+ return args[index] || '';
+ });
+ }
+
+ /**
+ * Sanitize a word by passing in the word and sanitization rules.
+ *
+ * @param {string} token
+ * @param {string} word
+ * @param {Array} collection
+ * @return {string}
+ */
+ function sanitizeWord (token, word, collection) {
+ // Empty string or doesn't need fixing.
+ if (!token.length || uncountables.hasOwnProperty(token)) {
+ return word;
+ }
+
+ var len = collection.length;
+
+ // Iterate over the sanitization rules and use the first one to match.
+ while (len--) {
+ var rule = collection[len];
+
+ // If the rule passes, return the replacement.
+ if (rule[0].test(word)) {
+ return word.replace(rule[0], function (match, index, word) {
+ var result = interpolate(rule[1], arguments);
+
+ if (match === '') {
+ return restoreCase(word[index - 1], result);
+ }
+
+ return restoreCase(match, result);
+ });
+ }
+ }
+
+ return word;
+ }
+
+ /**
+ * Replace a word with the updated word.
+ *
+ * @param {Object} replaceMap
+ * @param {Object} keepMap
+ * @param {Array} rules
+ * @return {Function}
+ */
+ function replaceWord (replaceMap, keepMap, rules) {
+ return function (word) {
+ // Get the correct token and case restoration functions.
+ var token = word.toLowerCase();
+
+ // Check against the keep object map.
+ if (keepMap.hasOwnProperty(token)) {
+ return restoreCase(word, token);
+ }
+
+ // Check against the replacement map for a direct word replacement.
+ if (replaceMap.hasOwnProperty(token)) {
+ return restoreCase(word, replaceMap[token]);
+ }
+
+ // Run all the rules against the word.
+ return sanitizeWord(token, word, rules);
+ };
+ }
+
+ /**
+ * Pluralize or singularize a word based on the passed in count.
+ *
+ * @param {string} word
+ * @param {number} count
+ * @param {boolean} inclusive
+ * @return {string}
+ */
+ function pluralize (word, count, inclusive) {
+ var pluralized = count === 1
+ ? pluralize.singular(word) : pluralize.plural(word);
+
+ return (inclusive ? count + ' ' : '') + pluralized;
+ }
+
+ /**
+ * Pluralize a word.
+ *
+ * @type {Function}
+ */
+ pluralize.plural = replaceWord(
+ irregularSingles, irregularPlurals, pluralRules
+ );
+
+ /**
+ * Singularize a word.
+ *
+ * @type {Function}
+ */
+ pluralize.singular = replaceWord(
+ irregularPlurals, irregularSingles, singularRules
+ );
+
+ /**
+ * Add a pluralization rule to the collection.
+ *
+ * @param {(string|RegExp)} rule
+ * @param {string} replacement
+ */
+ pluralize.addPluralRule = function (rule, replacement) {
+ pluralRules.push([sanitizeRule(rule), replacement]);
+ };
+
+ /**
+ * Add a singularization rule to the collection.
+ *
+ * @param {(string|RegExp)} rule
+ * @param {string} replacement
+ */
+ pluralize.addSingularRule = function (rule, replacement) {
+ singularRules.push([sanitizeRule(rule), replacement]);
+ };
+
+ /**
+ * Add an uncountable word rule.
+ *
+ * @param {(string|RegExp)} word
+ */
+ pluralize.addUncountableRule = function (word) {
+ if (typeof word === 'string') {
+ uncountables[word.toLowerCase()] = true;
+ return;
+ }
+
+ // Set singular and plural references for the word.
+ pluralize.addPluralRule(word, '$0');
+ pluralize.addSingularRule(word, '$0');
+ };
+
+ /**
+ * Add an irregular word definition.
+ *
+ * @param {string} single
+ * @param {string} plural
+ */
+ pluralize.addIrregularRule = function (single, plural) {
+ plural = plural.toLowerCase();
+ single = single.toLowerCase();
+
+ irregularSingles[single] = plural;
+ irregularPlurals[plural] = single;
+ };
+
+ /**
+ * Irregular rules.
+ */
+ [
+ // Pronouns.
+ ['I', 'we'],
+ ['me', 'us'],
+ ['he', 'they'],
+ ['she', 'they'],
+ ['them', 'them'],
+ ['myself', 'ourselves'],
+ ['yourself', 'yourselves'],
+ ['itself', 'themselves'],
+ ['herself', 'themselves'],
+ ['himself', 'themselves'],
+ ['themself', 'themselves'],
+ ['is', 'are'],
+ ['was', 'were'],
+ ['has', 'have'],
+ ['this', 'these'],
+ ['that', 'those'],
+ // Words ending in with a consonant and `o`.
+ ['echo', 'echoes'],
+ ['dingo', 'dingoes'],
+ ['volcano', 'volcanoes'],
+ ['tornado', 'tornadoes'],
+ ['torpedo', 'torpedoes'],
+ // Ends with `us`.
+ ['genus', 'genera'],
+ ['viscus', 'viscera'],
+ // Ends with `ma`.
+ ['stigma', 'stigmata'],
+ ['stoma', 'stomata'],
+ ['dogma', 'dogmata'],
+ ['lemma', 'lemmata'],
+ ['schema', 'schemata'],
+ ['anathema', 'anathemata'],
+ // Other irregular rules.
+ ['ox', 'oxen'],
+ ['axe', 'axes'],
+ ['die', 'dice'],
+ ['yes', 'yeses'],
+ ['foot', 'feet'],
+ ['eave', 'eaves'],
+ ['goose', 'geese'],
+ ['tooth', 'teeth'],
+ ['quiz', 'quizzes'],
+ ['human', 'humans'],
+ ['proof', 'proofs'],
+ ['carve', 'carves'],
+ ['valve', 'valves'],
+ ['looey', 'looies'],
+ ['thief', 'thieves'],
+ ['groove', 'grooves'],
+ ['pickaxe', 'pickaxes'],
+ ['whiskey', 'whiskies']
+ ].forEach(function (rule) {
+ return pluralize.addIrregularRule(rule[0], rule[1]);
+ });
+
+ /**
+ * Pluralization rules.
+ */
+ [
+ [/s?$/i, 's'],
+ [/([^aeiou]ese)$/i, '$1'],
+ [/(ax|test)is$/i, '$1es'],
+ [/(alias|[^aou]us|tlas|gas|ris)$/i, '$1es'],
+ [/(e[mn]u)s?$/i, '$1s'],
+ [/([^l]ias|[aeiou]las|[emjzr]as|[iu]am)$/i, '$1'],
+ [/(alumn|syllab|octop|vir|radi|nucle|fung|cact|stimul|termin|bacill|foc|uter|loc|strat)(?:us|i)$/i, '$1i'],
+ [/(alumn|alg|vertebr)(?:a|ae)$/i, '$1ae'],
+ [/(seraph|cherub)(?:im)?$/i, '$1im'],
+ [/(her|at|gr)o$/i, '$1oes'],
+ [/(agend|addend|millenni|dat|extrem|bacteri|desiderat|strat|candelabr|errat|ov|symposi|curricul|automat|quor)(?:a|um)$/i, '$1a'],
+ [/(apheli|hyperbat|periheli|asyndet|noumen|phenomen|criteri|organ|prolegomen|hedr|automat)(?:a|on)$/i, '$1a'],
+ [/sis$/i, 'ses'],
+ [/(?:(kni|wi|li)fe|(ar|l|ea|eo|oa|hoo)f)$/i, '$1$2ves'],
+ [/([^aeiouy]|qu)y$/i, '$1ies'],
+ [/([^ch][ieo][ln])ey$/i, '$1ies'],
+ [/(x|ch|ss|sh|zz)$/i, '$1es'],
+ [/(matr|cod|mur|sil|vert|ind|append)(?:ix|ex)$/i, '$1ices'],
+ [/(m|l)(?:ice|ouse)$/i, '$1ice'],
+ [/(pe)(?:rson|ople)$/i, '$1ople'],
+ [/(child)(?:ren)?$/i, '$1ren'],
+ [/eaux$/i, '$0'],
+ [/m[ae]n$/i, 'men'],
+ ['thou', 'you']
+ ].forEach(function (rule) {
+ return pluralize.addPluralRule(rule[0], rule[1]);
+ });
+
+ /**
+ * Singularization rules.
+ */
+ [
+ [/s$/i, ''],
+ [/(ss)$/i, '$1'],
+ [/((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)(?:sis|ses)$/i, '$1sis'],
+ [/(^analy)(?:sis|ses)$/i, '$1sis'],
+ [/(wi|kni|(?:after|half|high|low|mid|non|night|[^\w]|^)li)ves$/i, '$1fe'],
+ [/(ar|(?:wo|[ae])l|[eo][ao])ves$/i, '$1f'],
+ [/ies$/i, 'y'],
+ [/\b([pl]|zomb|(?:neck|cross)?t|coll|faer|food|gen|goon|group|lass|talk|goal|cut)ies$/i, '$1ie'],
+ [/\b(mon|smil)ies$/i, '$1ey'],
+ [/(m|l)ice$/i, '$1ouse'],
+ [/(seraph|cherub)im$/i, '$1'],
+ [/(x|ch|ss|sh|zz|tto|go|cho|alias|[^aou]us|tlas|gas|(?:her|at|gr)o|ris)(?:es)?$/i, '$1'],
+ [/(e[mn]u)s?$/i, '$1'],
+ [/(movie|twelve)s$/i, '$1'],
+ [/(cris|test|diagnos)(?:is|es)$/i, '$1is'],
+ [/(alumn|syllab|octop|vir|radi|nucle|fung|cact|stimul|termin|bacill|foc|uter|loc|strat)(?:us|i)$/i, '$1us'],
+ [/(agend|addend|millenni|dat|extrem|bacteri|desiderat|strat|candelabr|errat|ov|symposi|curricul|quor)a$/i, '$1um'],
+ [/(apheli|hyperbat|periheli|asyndet|noumen|phenomen|criteri|organ|prolegomen|hedr|automat)a$/i, '$1on'],
+ [/(alumn|alg|vertebr)ae$/i, '$1a'],
+ [/(cod|mur|sil|vert|ind)ices$/i, '$1ex'],
+ [/(matr|append)ices$/i, '$1ix'],
+ [/(pe)(rson|ople)$/i, '$1rson'],
+ [/(child)ren$/i, '$1'],
+ [/(eau)x?$/i, '$1'],
+ [/men$/i, 'man']
+ ].forEach(function (rule) {
+ return pluralize.addSingularRule(rule[0], rule[1]);
+ });
+
+ /**
+ * Uncountable rules.
+ */
+ [
+ // Singular words with no plurals.
+ 'advice',
+ 'adulthood',
+ 'agenda',
+ 'aid',
+ 'alcohol',
+ 'ammo',
+ 'athletics',
+ 'bison',
+ 'blood',
+ 'bream',
+ 'buffalo',
+ 'butter',
+ 'carp',
+ 'cash',
+ 'chassis',
+ 'chess',
+ 'clothing',
+ 'commerce',
+ 'cod',
+ 'cooperation',
+ 'corps',
+ 'digestion',
+ 'debris',
+ 'diabetes',
+ 'energy',
+ 'equipment',
+ 'elk',
+ 'excretion',
+ 'expertise',
+ 'flounder',
+ 'fun',
+ 'gallows',
+ 'garbage',
+ 'graffiti',
+ 'headquarters',
+ 'health',
+ 'herpes',
+ 'highjinks',
+ 'homework',
+ 'housework',
+ 'information',
+ 'jeans',
+ 'justice',
+ 'kudos',
+ 'labour',
+ 'literature',
+ 'machinery',
+ 'mackerel',
+ 'mail',
+ 'media',
+ 'mews',
+ 'moose',
+ 'music',
+ 'news',
+ 'pike',
+ 'plankton',
+ 'pliers',
+ 'pollution',
+ 'premises',
+ 'rain',
+ 'research',
+ 'rice',
+ 'salmon',
+ 'scissors',
+ 'series',
+ 'sewage',
+ 'shambles',
+ 'shrimp',
+ 'species',
+ 'staff',
+ 'swine',
+ 'trout',
+ 'traffic',
+ 'transporation',
+ 'tuna',
+ 'wealth',
+ 'welfare',
+ 'whiting',
+ 'wildebeest',
+ 'wildlife',
+ 'you',
+ // Regexes.
+ /pox$/i, // "chickenpox", "smallpox"
+ /ois$/i,
+ /deer$/i, // "deer", "reindeer"
+ /fish$/i, // "fish", "blowfish", "angelfish"
+ /sheep$/i,
+ /measles$/i,
+ /[^aeiou]ese$/i // "chinese", "japanese"
+ ].forEach(pluralize.addUncountableRule);
+
+ return pluralize;
+});
\ No newline at end of file
[04/19] ambari git commit: AMBARI-18439 - [Grafana] Add Kafka-Offset
dashboard for Storm (prajwal)
Posted by nc...@apache.org.
AMBARI-18439 - [Grafana] Add Kafka-Offset dashboard for Storm (prajwal)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7c8ada18
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7c8ada18
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7c8ada18
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7c8ada187d9f80ecc93a901b403c4e13821888b5
Parents: 69e8f6f
Author: Prajwal Rao <pr...@gmail.com>
Authored: Mon Sep 26 13:54:08 2016 -0700
Committer: Prajwal Rao <pr...@gmail.com>
Committed: Mon Sep 26 13:54:08 2016 -0700
----------------------------------------------------------------------
.../ambari-metrics/datasource.js | 64 +++++
.../HDF/grafana-storm-kafka-offset.json | 258 +++++++++++++++++++
.../HDP/grafana-storm-kafka-offset.json | 258 +++++++++++++++++++
3 files changed, 580 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7c8ada18/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js b/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
index fa0cc47..626439d 100644
--- a/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
+++ b/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
@@ -418,6 +418,21 @@ define([
}));
}
+ //Templatized Dashboard for Storm Kafka Offset
+ if (templateSrv.variables[0].query === "topologies" && templateSrv.variables[1] &&
+ templateSrv.variables[1].name === "topic") {
+ var selectedTopology = templateSrv._values.topologies;
+ var selectedTopic = templateSrv._values.topic;
+ metricsPromises.push(_.map(options.targets, function(target) {
+ target.sTopology = selectedTopology;
+ target.sTopic = selectedTopic;
+ target.sPartition = options.scopedVars.partition.value;
+ target.sTopoMetric = target.metric.replace('*', target.sTopology).replace('*', target.sTopic)
+ .replace('*', target.sPartition);
+ return getStormData(target);
+ }));
+ }
+
// To speed up querying on templatized dashboards.
if (templateSrv.variables[1] && templateSrv.variables[1].name === "hosts") {
var allHosts = templateSrv._values.hosts.lastIndexOf('}') > 0 ? templateSrv._values.hosts.slice(1,-1) :
@@ -616,6 +631,55 @@ define([
});
});
}
+ // Module-level cache: topology name -> topic name -> [partition names].
+ // Populated lazily by getStormEntities() and read by the stormTopic /
+ // stormPartition templated-variable lookups elsewhere in this file.
+ var stormEntities = {};
+ // Builds the stormEntities cache from the "nimbus" app metrics whose names
+ // contain "partition" (e.g. topology.<topology>.kafka-topic.<topic>.<partition>...).
+ AmbariMetricsDatasource.prototype.getStormEntities = function () {
+ return this.initMetricAppidMapping()
+ .then(function () {
+ var storm = allMetrics["nimbus"];
+ // Keep only metric names matching /partition/ (regex test bound as predicate).
+ var extractTopologies = storm.filter(/./.test.bind(new
+ RegExp("partition", 'g')));
+ _.map(extractTopologies, function(topology){
+ // Split the metric name and keep the first 5 segments:
+ // [0]=prefix, [1]=topology, [3]=topic, [4]=partition.
+ topology = topology.split('.').slice(0,5);
+ // NOTE(review): topologyN and topicN are assigned without declaration here,
+ // apparently relying on implicit globals so the "stormPartition" lookup can
+ // read the last-seen topology/topic — verify this coupling is intentional.
+ var topologyName = topologyN = topology[1]; // Topology
+ var topologyTopicName = topicN = topology[3]; // Topic
+ var topologyTopicPartitionName = topology[4]; // Partition
+ // Append the partition under its topology/topic, creating levels as needed.
+ if (stormEntities[topologyName]) {
+ if (stormEntities[topologyName][topologyTopicName]) {
+ stormEntities[topologyName][topologyTopicName].push(topologyTopicPartitionName);
+ } else {
+ stormEntities[topologyName][topologyTopicName] = [topologyTopicPartitionName];
+ }
+ } else {
+ stormEntities[topologyName] = {};
+ stormEntities[topologyName][topologyTopicName] = [topologyTopicPartitionName];
+ }
+ });
+ });
+ };
+ //Templated Variables for Storm Topics per Topology
+ if (interpolated.includes("stormTopic")) {
+ var topicName = interpolated.substring(0,interpolated.indexOf('.'));
+ return this.getStormEntities().then(function () {
+ var topicNames = Object.keys(stormEntities[topicName]);
+ return _.map(topicNames, function(names){
+ return {
+ text: names
+ };
+ });
+ });
+ }
+ //Templated Variables for Storm Partitions per Topic
+ if (interpolated.includes("stormPartition")) {
+ var topicN, topologyN;
+ return this.getStormEntities().then(function () {
+ var partitionNames = _.uniq(stormEntities[topologyN][topicN]);
+ return _.map(partitionNames, function(names){
+ return {
+ text: names
+ };
+ });
+ });
+ }
// Templated Variable for YARN Queues.
// It will search the cluster and populate the queues.
if(interpolated === "yarnqueues") {
http://git-wip-us.apache.org/repos/asf/ambari/blob/7c8ada18/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-storm-kafka-offset.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-storm-kafka-offset.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-storm-kafka-offset.json
new file mode 100644
index 0000000..ac1f829
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-storm-kafka-offset.json
@@ -0,0 +1,258 @@
+{
+ "id": null,
+ "title": "Storm - Kafka-Offset",
+ "originalTitle": "Storm - Kafka-Offset",
+ "tags": [
+ "storm",
+ "builtin",
+ "2.4.0.0"
+ ],
+ "style": "dark",
+ "timezone": "browser",
+ "editable": true,
+ "hideControls": false,
+ "sharedCrosshair": false,
+ "rows": [
+ {
+ "collapse": false,
+ "editable": true,
+ "height": "25px",
+ "panels": [
+ {
+ "content": "<h4 align=\"center\">Metrics to see the status for the Storm topics on a per partition level. Click on each row title to expand on demand to look at various metrics. </h4>\n<h6 style=\"color:red;\" align=\"center\">This dashboard is managed by Ambari. You may lose any changes made to this dashboard. If you want to customize, make your own copy.</h6>\n<h5 align=\"center\">Note: Period ('.') contained topology names are not supported.</h5>",
+ "editable": true,
+ "error": false,
+ "id": 14,
+ "isNew": true,
+ "links": [],
+ "mode": "html",
+ "span": 12,
+ "style": {},
+ "title": "",
+ "type": "text"
+ }
+ ],
+ "title": "New row"
+ },
+ {
+ "collapse": false,
+ "editable": true,
+ "height": "250px",
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "datasource": null,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "leftMin": null,
+ "rightLogBase": 1,
+ "rightMax": null,
+ "rightMin": null,
+ "threshold1": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2": null,
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "id": 15,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "minSpan": 4,
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "repeat": "partition",
+ "scopedVars": {
+ "partition": {
+ "text": "partition-0",
+ "value": "partition-0",
+ "selected": true
+ }
+ },
+ "seriesOverrides": [],
+ "span": 12,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "aggregator": "avg",
+ "alias": "Kafka Offset",
+ "app": "nimbus",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "hosts": "",
+ "metric": "topology.*.kafka-topic.*.*.latestTimeOffset",
+ "precision": "default",
+ "refId": "B",
+ "sPartition": "partition-0",
+ "sTopic": "myKafkaTopic",
+ "sTopoMetric": "topology.kafka-topology.kafka-topic.myKafkaTopic.partition-0.latestTimeOffset",
+ "sTopology": "kafka-topology",
+ "seriesAggregator": "none",
+ "transform": "none"
+ },
+ {
+ "aggregator": "avg",
+ "alias": "Storm Offset",
+ "app": "nimbus",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "metric": "topology.*.kafka-topic.*.*.latestCompletedOffset",
+ "precision": "default",
+ "refId": "A",
+ "sComponent": "--acker",
+ "sPartition": "partition-0",
+ "sTopic": "myKafkaTopic",
+ "sTopoMetric": "topology.kafka-topology.kafka-topic.myKafkaTopic.partition-0.latestCompletedOffset",
+ "sTopology": "kafka-topology",
+ "seriesAggregator": "none",
+ "transform": "none"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "$partition",
+ "tooltip": {
+ "shared": true,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ]
+ }
+ ],
+ "title": "New row"
+ }
+ ],
+ "time": {
+ "from": "now-6h",
+ "to": "now"
+ },
+ "timepicker": {
+ "now": true,
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "templating": {
+ "list": [
+ {
+ "allFormat": "glob",
+ "current": {
+ "text": "kafka-topology",
+ "value": "kafka-topology"
+ },
+ "datasource": null,
+ "includeAll": false,
+ "multi": false,
+ "multiFormat": "glob",
+ "name": "topologies",
+ "options": [
+ ],
+ "query": "topologies",
+ "refresh": true,
+ "regex": "",
+ "type": "query"
+ },
+ {
+ "allFormat": "glob",
+ "current": {
+ "text": "",
+ "value": ""
+ },
+ "datasource": null,
+ "includeAll": false,
+ "multi": false,
+ "multiFormat": "glob",
+ "name": "topic",
+ "options": [
+
+ ],
+ "query": "$topologies.stormTopic",
+ "refresh": true,
+ "regex": "",
+ "type": "query"
+ },
+ {
+ "allFormat": "glob",
+ "current": {
+ "text": "partition-0",
+ "value": "partition-0"
+ },
+ "datasource": null,
+ "includeAll": true,
+ "multi": true,
+ "multiFormat": "glob",
+ "name": "partition",
+ "options": [
+ {
+ "text": "partition-0",
+ "value": "partition-0",
+ "selected": true
+ }
+ ],
+ "query": "$topic.stormPartition",
+ "refresh": true,
+ "type": "query"
+ }
+ ]
+ },
+ "annotations": {
+ "list": []
+ },
+ "refresh": false,
+ "schemaVersion": 8,
+ "version": 23,
+ "links": [
+ {
+ "asDropdown": true,
+ "icon": "external link",
+ "tags": [
+ "storm"
+ ],
+ "title": "Storm Dashboards",
+ "type": "dashboards"
+ }
+ ]
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/7c8ada18/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-storm-kafka-offset.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-storm-kafka-offset.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-storm-kafka-offset.json
new file mode 100644
index 0000000..ac1f829
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-storm-kafka-offset.json
@@ -0,0 +1,258 @@
+{
+ "id": null,
+ "title": "Storm - Kafka-Offset",
+ "originalTitle": "Storm - Kafka-Offset",
+ "tags": [
+ "storm",
+ "builtin",
+ "2.4.0.0"
+ ],
+ "style": "dark",
+ "timezone": "browser",
+ "editable": true,
+ "hideControls": false,
+ "sharedCrosshair": false,
+ "rows": [
+ {
+ "collapse": false,
+ "editable": true,
+ "height": "25px",
+ "panels": [
+ {
+ "content": "<h4 align=\"center\">Metrics to see the status for the Storm topics on a per partition level. Click on each row title to expand on demand to look at various metrics. </h4>\n<h6 style=\"color:red;\" align=\"center\">This dashboard is managed by Ambari. You may lose any changes made to this dashboard. If you want to customize, make your own copy.</h6>\n<h5 align=\"center\">Note: Period ('.') contained topology names are not supported.</h5>",
+ "editable": true,
+ "error": false,
+ "id": 14,
+ "isNew": true,
+ "links": [],
+ "mode": "html",
+ "span": 12,
+ "style": {},
+ "title": "",
+ "type": "text"
+ }
+ ],
+ "title": "New row"
+ },
+ {
+ "collapse": false,
+ "editable": true,
+ "height": "250px",
+ "panels": [
+ {
+ "aliasColors": {},
+ "bars": false,
+ "datasource": null,
+ "editable": true,
+ "error": false,
+ "fill": 1,
+ "grid": {
+ "leftLogBase": 1,
+ "leftMax": null,
+ "leftMin": null,
+ "rightLogBase": 1,
+ "rightMax": null,
+ "rightMin": null,
+ "threshold1": null,
+ "threshold1Color": "rgba(216, 200, 27, 0.27)",
+ "threshold2": null,
+ "threshold2Color": "rgba(234, 112, 112, 0.22)"
+ },
+ "id": 15,
+ "isNew": true,
+ "legend": {
+ "avg": false,
+ "current": false,
+ "max": false,
+ "min": false,
+ "show": true,
+ "total": false,
+ "values": false
+ },
+ "lines": true,
+ "linewidth": 2,
+ "links": [],
+ "minSpan": 4,
+ "nullPointMode": "connected",
+ "percentage": false,
+ "pointradius": 5,
+ "points": false,
+ "renderer": "flot",
+ "repeat": "partition",
+ "scopedVars": {
+ "partition": {
+ "text": "partition-0",
+ "value": "partition-0",
+ "selected": true
+ }
+ },
+ "seriesOverrides": [],
+ "span": 12,
+ "stack": false,
+ "steppedLine": false,
+ "targets": [
+ {
+ "aggregator": "avg",
+ "alias": "Kafka Offset",
+ "app": "nimbus",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "hosts": "",
+ "metric": "topology.*.kafka-topic.*.*.latestTimeOffset",
+ "precision": "default",
+ "refId": "B",
+ "sPartition": "partition-0",
+ "sTopic": "myKafkaTopic",
+ "sTopoMetric": "topology.kafka-topology.kafka-topic.myKafkaTopic.partition-0.latestTimeOffset",
+ "sTopology": "kafka-topology",
+ "seriesAggregator": "none",
+ "transform": "none"
+ },
+ {
+ "aggregator": "avg",
+ "alias": "Storm Offset",
+ "app": "nimbus",
+ "downsampleAggregator": "avg",
+ "errors": {},
+ "metric": "topology.*.kafka-topic.*.*.latestCompletedOffset",
+ "precision": "default",
+ "refId": "A",
+ "sComponent": "--acker",
+ "sPartition": "partition-0",
+ "sTopic": "myKafkaTopic",
+ "sTopoMetric": "topology.kafka-topology.kafka-topic.myKafkaTopic.partition-0.latestCompletedOffset",
+ "sTopology": "kafka-topology",
+ "seriesAggregator": "none",
+ "transform": "none"
+ }
+ ],
+ "timeFrom": null,
+ "timeShift": null,
+ "title": "$partition",
+ "tooltip": {
+ "shared": true,
+ "value_type": "cumulative"
+ },
+ "type": "graph",
+ "x-axis": true,
+ "y-axis": true,
+ "y_formats": [
+ "short",
+ "short"
+ ]
+ }
+ ],
+ "title": "New row"
+ }
+ ],
+ "time": {
+ "from": "now-6h",
+ "to": "now"
+ },
+ "timepicker": {
+ "now": true,
+ "refresh_intervals": [
+ "5s",
+ "10s",
+ "30s",
+ "1m",
+ "5m",
+ "15m",
+ "30m",
+ "1h",
+ "2h",
+ "1d"
+ ],
+ "time_options": [
+ "5m",
+ "15m",
+ "1h",
+ "6h",
+ "12h",
+ "24h",
+ "2d",
+ "7d",
+ "30d"
+ ]
+ },
+ "templating": {
+ "list": [
+ {
+ "allFormat": "glob",
+ "current": {
+ "text": "kafka-topology",
+ "value": "kafka-topology"
+ },
+ "datasource": null,
+ "includeAll": false,
+ "multi": false,
+ "multiFormat": "glob",
+ "name": "topologies",
+ "options": [
+ ],
+ "query": "topologies",
+ "refresh": true,
+ "regex": "",
+ "type": "query"
+ },
+ {
+ "allFormat": "glob",
+ "current": {
+ "text": "",
+ "value": ""
+ },
+ "datasource": null,
+ "includeAll": false,
+ "multi": false,
+ "multiFormat": "glob",
+ "name": "topic",
+ "options": [
+
+ ],
+ "query": "$topologies.stormTopic",
+ "refresh": true,
+ "regex": "",
+ "type": "query"
+ },
+ {
+ "allFormat": "glob",
+ "current": {
+ "text": "partition-0",
+ "value": "partition-0"
+ },
+ "datasource": null,
+ "includeAll": true,
+ "multi": true,
+ "multiFormat": "glob",
+ "name": "partition",
+ "options": [
+ {
+ "text": "partition-0",
+ "value": "partition-0",
+ "selected": true
+ }
+ ],
+ "query": "$topic.stormPartition",
+ "refresh": true,
+ "type": "query"
+ }
+ ]
+ },
+ "annotations": {
+ "list": []
+ },
+ "refresh": false,
+ "schemaVersion": 8,
+ "version": 23,
+ "links": [
+ {
+ "asDropdown": true,
+ "icon": "external link",
+ "tags": [
+ "storm"
+ ],
+ "title": "Storm Dashboards",
+ "type": "dashboards"
+ }
+ ]
+}
\ No newline at end of file
[06/19] ambari git commit: AMBARI-18466. Component should be renamed
to "Microsoft R Server Client". (Attila Doroszlai via stoader)
Posted by nc...@apache.org.
AMBARI-18466. Component should be renamed to "Microsoft R Server Client". (Attila Doroszlai via stoader)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6d352282
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6d352282
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6d352282
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6d3522825b962e926fe5e244b706a619c74b0116
Parents: 5af6d54
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Tue Sep 27 11:20:06 2016 +0200
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Tue Sep 27 11:20:06 2016 +0200
----------------------------------------------------------------------
.../scripts/shared_initialization.py | 2 +-
.../MICROSOFT_R/8.0.0/metainfo.xml | 4 ++--
.../MICROSOFT_R/8.0.0/service_advisor.py | 22 ++++++++++----------
3 files changed, 14 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/6d352282/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index 2182fd1..397c22d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -177,7 +177,7 @@ def create_dirs():
def create_microsoft_r_dir():
import params
- if 'MICROSOFT_R_CLIENT' in params.component_list and params.default_fs:
+ if 'MICROSOFT_R_SERVER_CLIENT' in params.component_list and params.default_fs:
directory = '/user/RevoShare'
try:
params.HdfsResource(directory,
http://git-wip-us.apache.org/repos/asf/ambari/blob/6d352282/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/metainfo.xml b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/metainfo.xml
index 6998efc..709929e 100644
--- a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/metainfo.xml
+++ b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/metainfo.xml
@@ -27,8 +27,8 @@
<components>
<component>
- <name>MICROSOFT_R_CLIENT</name>
- <displayName>Microsoft R Client</displayName>
+ <name>MICROSOFT_R_SERVER_CLIENT</name>
+ <displayName>Microsoft R Server Client</displayName>
<category>CLIENT</category>
<cardinality>1+</cardinality>
<commandScript>
http://git-wip-us.apache.org/repos/asf/ambari/blob/6d352282/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/service_advisor.py b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/service_advisor.py
index 891d2c9..58f0dbe 100644
--- a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/service_advisor.py
+++ b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/service_advisor.py
@@ -37,19 +37,19 @@ except Exception as e:
class MICROSOFT_R800ServiceAdvisor(service_advisor.ServiceAdvisor):
def colocateService(self, hostsComponentsMap, serviceComponents):
- # colocate R_CLIENT with NODEMANAGERs and YARN_CLIENTs
- rClientComponent = [component for component in serviceComponents if component["StackServiceComponents"]["component_name"] == "MICROSOFT_R_CLIENT"]
+ # colocate R_SERVER_CLIENT with NODEMANAGERs and YARN_CLIENTs
+ rClientComponent = [component for component in serviceComponents if component["StackServiceComponents"]["component_name"] == "MICROSOFT_R_SERVER_CLIENT"]
traceback.print_tb(None)
rClientComponent = rClientComponent[0]
if not self.isComponentHostsPopulated(rClientComponent):
for hostName in hostsComponentsMap.keys():
hostComponents = hostsComponentsMap[hostName]
if ({"name": "NODEMANAGER"} in hostComponents or {"name": "YARN_CLIENT"} in hostComponents) \
- and {"name": "MICROSOFT_R_CLIENT"} not in hostComponents:
- hostsComponentsMap[hostName].append({ "name": "MICROSOFT_R_CLIENT" })
+ and {"name": "MICROSOFT_R_SERVER_CLIENT"} not in hostComponents:
+ hostsComponentsMap[hostName].append({ "name": "MICROSOFT_R_SERVER_CLIENT" })
if ({"name": "NODEMANAGER"} not in hostComponents and {"name": "YARN_CLIENT"} not in hostComponents) \
- and {"name": "MICROSOFT_R_CLIENT"} in hostComponents:
- hostsComponentsMap[hostName].remove({"name": "MICROSOFT_R_CLIENT"})
+ and {"name": "MICROSOFT_R_SERVER_CLIENT"} in hostComponents:
+ hostsComponentsMap[hostName].remove({"name": "MICROSOFT_R_SERVER_CLIENT"})
def getServiceComponentLayoutValidations(self, services, hosts):
componentsListList = [service["components"] for service in services["services"]]
@@ -57,17 +57,17 @@ class MICROSOFT_R800ServiceAdvisor(service_advisor.ServiceAdvisor):
hostsList = [host["Hosts"]["host_name"] for host in hosts["items"]]
hostsCount = len(hostsList)
- rClientHosts = self.getHosts(componentsList, "MICROSOFT_R_CLIENT")
+ rClientHosts = self.getHosts(componentsList, "MICROSOFT_R_SERVER_CLIENT")
expectedrClientHosts = set(self.getHosts(componentsList, "NODEMANAGER")) | set(self.getHosts(componentsList, "YARN_CLIENT"))
items = []
- # Generate WARNING if any R_CLIENT is not colocated with NODEMANAGER or YARN_CLIENT
+ # Generate WARNING if any R_SERVER_CLIENT is not colocated with NODEMANAGER or YARN_CLIENT
mismatchHosts = sorted(expectedrClientHosts.symmetric_difference(set(rClientHosts)))
if len(mismatchHosts) > 0:
hostsString = ', '.join(mismatchHosts)
- message = "Microsoft-R Client must be installed on NodeManagers and YARN Clients. " \
+ message = "Microsoft R Server Client must be installed on NodeManagers and YARN Clients. " \
"The following {0} host(s) do not satisfy the colocation recommendation: {1}".format(len(mismatchHosts), hostsString)
- items.append( { "type": 'host-component', "level": 'WARN', "message": message, "component-name": 'MICROSOFT_R_CLIENT' } )
+ items.append( { "type": 'host-component', "level": 'WARN', "message": message, "component-name": 'MICROSOFT_R_SERVER_CLIENT' } )
- return items
\ No newline at end of file
+ return items
[02/19] ambari git commit: AMBARI-18464. Provide Warnings When ulimit
Is High To Prevent Heartbeat Lost Issues (aonishuk)
Posted by nc...@apache.org.
AMBARI-18464. Provide Warnings When ulimit Is High To Prevent Heartbeat Lost Issues (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ee4e63a9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ee4e63a9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ee4e63a9
Branch: refs/heads/branch-dev-patch-upgrade
Commit: ee4e63a9a75131119f3e157eeb1f8f1462a1798f
Parents: 8192601
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 26 19:09:52 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 26 19:09:52 2016 +0300
----------------------------------------------------------------------
ambari-server/src/main/resources/alerts.json | 31 ++++++++
.../main/resources/host_scripts/alert_ulimit.py | 83 ++++++++++++++++++++
.../test/python/host_scripts/TestAlertUlimit.py | 44 +++++++++++
3 files changed, 158 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ee4e63a9/ambari-server/src/main/resources/alerts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/alerts.json b/ambari-server/src/main/resources/alerts.json
index 9cffff5..2559b3a 100644
--- a/ambari-server/src/main/resources/alerts.json
+++ b/ambari-server/src/main/resources/alerts.json
@@ -149,7 +149,38 @@
}
]
}
+ },
+ {
+ "name": "ambari_agent_ulimit",
+ "label": "Ulimit for open files",
+ "description": "This host-level alert is triggered if value of ulimit for open files (-n) goes above specific thresholds. The default threshold values are 200000 for WARNING and 800000 for CRITICAL.",
+ "interval": 1,
+ "scope": "HOST",
+ "enabled": true,
+ "source": {
+ "type": "SCRIPT",
+ "path": "alert_ulimit.py",
+ "parameters": [
+ {
+ "name": "ulimit.warning.threshold",
+ "display_name": "Warning",
+ "value": 200000,
+ "type": "NUMERIC",
+ "description": "The threshold of ulimit for open files (-n) for WARNING alert.",
+ "threshold": "WARNING"
+ },
+ {
+ "name": "ulimit.critical.threshold",
+ "display_name": "Critical",
+ "value": 800000,
+ "type": "NUMERIC",
+ "description": "The threshold of ulimit for open files (-n) for CRITICAL alert.",
+ "threshold": "CRITICAL"
+ }
+ ]
+ }
}
+
]
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/ee4e63a9/ambari-server/src/main/resources/host_scripts/alert_ulimit.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/host_scripts/alert_ulimit.py b/ambari-server/src/main/resources/host_scripts/alert_ulimit.py
new file mode 100644
index 0000000..8c57b84
--- /dev/null
+++ b/ambari-server/src/main/resources/host_scripts/alert_ulimit.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import resource
+
+WARNING_KEY = "ulimit.warning.threshold"
+CRITICAL_KEY = "ulimit.critical.threshold"
+
+DEFAULT_WARNING_KEY = 200000
+DEFAULT_CRITICAL_KEY = 800000
+
+def get_tokens():
+ """
+ Returns a tuple of tokens in the format {{site/property}} that will be used
+ to build the dictionary passed into execute
+ """
+ return None
+
+def execute(configurations={}, parameters={}, host_name=None):
+ """
+ Performs advanced ulimit checks under Linux.
+
+ Returns a tuple containing the result code and a pre-formatted result label
+
+ Keyword arguments:
+ configurations (dictionary): a mapping of configuration key to value
+ parameters (dictionary): a mapping of script parameter key to value
+ host_name (string): the name of this host where the alert is running
+
+ """
+
+ # try:
+ soft_ulimit, hard_ulimiit = resource.getrlimit(resource.RLIMIT_NOFILE)
+ return_code, label = _get_warnings_for_partition(parameters, soft_ulimit)
+ # except Exception as e:
+ # return 'CRITICAL', ["Unable to determine ulimit for open files (-n)"]
+
+ return return_code, [label]
+
+def _get_warnings_for_partition(parameters, soft_ulimit):
+
+ # start with hard coded defaults
+ warning_count = DEFAULT_WARNING_KEY
+ critical_count = DEFAULT_CRITICAL_KEY
+
+ if WARNING_KEY in parameters:
+ warning_count = int(parameters[WARNING_KEY])
+
+ if CRITICAL_KEY in parameters:
+ critical_count = int(parameters[CRITICAL_KEY])
+
+ if soft_ulimit is None or soft_ulimit == "":
+ return 'CRITICAL', ['Unable to determine ulimit for open files (-n)']
+
+ return_code = "OK"
+ label = "Ulimit for open files (-n) is {0}".format(soft_ulimit)
+
+ if soft_ulimit >= critical_count:
+ label = "Ulimit for open files (-n) is {0} which is higher or equal than critical value of {1}".format(soft_ulimit, critical_count)
+ return_code = 'CRITICAL'
+ elif soft_ulimit >= warning_count:
+ label = "Ulimit for open files (-n) is {0} which is higher or equal than warning value of {1}".format(soft_ulimit, warning_count)
+ return_code = 'WARNING'
+
+ return return_code, label
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/ee4e63a9/ambari-server/src/test/python/host_scripts/TestAlertUlimit.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/host_scripts/TestAlertUlimit.py b/ambari-server/src/test/python/host_scripts/TestAlertUlimit.py
new file mode 100644
index 0000000..09bf4e6
--- /dev/null
+++ b/ambari-server/src/test/python/host_scripts/TestAlertUlimit.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import alert_ulimit
+from mock.mock import patch, MagicMock
+from unittest import TestCase
+
+
+class TestAlertUlimit(TestCase):
+ """Unit tests for the alert_ulimit host script."""
+
+ @patch('resource.getrlimit')
+ def test_ulimits(self, ulimit_mock):
+ # getrlimit is mocked to return (soft, hard) RLIMIT_NOFILE pairs.
+
+ # OK: 1024 is below the default warning threshold (200000)
+ ulimit_mock.return_value = 1024, 1024
+ res = alert_ulimit.execute()
+ self.assertEquals(res, ('OK', ['Ulimit for open files (-n) is 1024']))
+
+ # WARNING: 200000 equals the default warning threshold
+ ulimit_mock.return_value = 200000, 200000
+ res = alert_ulimit.execute()
+ self.assertEquals(res, ('WARNING', ['Ulimit for open files (-n) is 200000 which is higher or equal than warning value of 200000']))
+
+ # CRITICAL: 1000000 is above the default critical threshold (800000)
+ ulimit_mock.return_value = 1000000, 1000000
+ res = alert_ulimit.execute()
+ self.assertEquals(res, ('CRITICAL', ['Ulimit for open files (-n) is 1000000 which is higher or equal than critical value of 800000']))
\ No newline at end of file
[15/19] ambari git commit: AMBARI-18401. Allow running a subset of
Python unit tests. (Attila Doroszlai via stoader)
Posted by nc...@apache.org.
AMBARI-18401. Allow running a subset of Python unit tests. (Attila Doroszlai via stoader)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/39858cca
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/39858cca
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/39858cca
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 39858ccafcee6c49bba21d7385d7129d71dc8851
Parents: 2700bd1
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Wed Sep 28 13:58:32 2016 +0200
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Wed Sep 28 13:58:32 2016 +0200
----------------------------------------------------------------------
ambari-server/pom.xml | 2 ++
ambari-server/src/test/python/unitTests.py | 25 ++++++++++---------------
2 files changed, 12 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/39858cca/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 354b6cb..d507b82 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -28,6 +28,7 @@
<!-- On centos the python xml's are inside python package -->
<deb.architecture>amd64</deb.architecture>
<custom.tests>false</custom.tests>
+ <python.test.mask>[Tt]est*.py</python.test.mask>
<hdpUrlForCentos6>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.1.1.0</hdpUrlForCentos6>
<hdpLatestUrl>http://public-repo-1.hortonworks.com/HDP/hdp_urlinfo.json</hdpLatestUrl>
<ambari_commons.install.dir>/usr/lib/ambari-server/lib/ambari_commons</ambari_commons.install.dir>
@@ -621,6 +622,7 @@
<arguments>
<argument>unitTests.py</argument>
<argument>${custom.tests}</argument>
+ <argument>${python.test.mask}</argument>
</arguments>
<environmentVariables>
<PYTHONPATH>${path.python.1}${pathsep}$PYTHONPATH</PYTHONPATH>
http://git-wip-us.apache.org/repos/asf/ambari/blob/39858cca/ambari-server/src/test/python/unitTests.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/unitTests.py b/ambari-server/src/test/python/unitTests.py
index 037b6a5..7941ed3 100644
--- a/ambari-server/src/test/python/unitTests.py
+++ b/ambari-server/src/test/python/unitTests.py
@@ -86,17 +86,12 @@ def get_stack_name():
def get_stack_name():
return "HDP"
-def stack_test_executor(base_folder, service, stack, custom_tests, executor_result):
+def stack_test_executor(base_folder, service, stack, test_mask, executor_result):
"""
Stack tests executor. Must be executed in separate process to prevent module
name conflicts in different stacks.
"""
#extract stack scripts folders
- if custom_tests:
- test_mask = CUSTOM_TEST_MASK
- else:
- test_mask = TEST_MASK
-
server_src_dir = get_parent_path(base_folder, 'src')
script_folders = set()
@@ -152,10 +147,14 @@ def stack_test_executor(base_folder, service, stack, custom_tests, executor_resu
def main():
if not os.path.exists(newtmpdirpath): os.makedirs(newtmpdirpath)
- custom_tests = False
- if len(sys.argv) > 1:
- if sys.argv[1] == "true":
- custom_tests = True
+
+ if len(sys.argv) > 1 and sys.argv[1] == "true": # handle custom_tests for backward-compatibility
+ test_mask = CUSTOM_TEST_MASK
+ elif len(sys.argv) > 2:
+ test_mask = sys.argv[2]
+ else:
+ test_mask = TEST_MASK
+
pwd = os.path.abspath(os.path.dirname(__file__))
ambari_server_folder = get_parent_path(pwd, 'ambari-server')
@@ -212,7 +211,7 @@ def main():
args=(variant['directory'],
variant['service'],
variant['stack'],
- custom_tests,
+ test_mask,
executor_result)
)
process.start()
@@ -238,10 +237,6 @@ def main():
#run base ambari-server tests
sys.stderr.write("Running tests for ambari-server\n")
- if custom_tests:
- test_mask = CUSTOM_TEST_MASK
- else:
- test_mask = TEST_MASK
test_dirs = [
(os.path.join(pwd, 'custom_actions'), "\nRunning tests for custom actions\n"),
[09/19] ambari git commit: AMBARI-18051 - Services should be able to
provide their own pre-req checks by supplying a jar file
Posted by nc...@apache.org.
AMBARI-18051 - Services should be able to provide their own pre-req checks by supplying a jar file
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7b924342
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7b924342
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7b924342
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7b92434290f1e092e228bcc74584d4a4630ac392
Parents: 53b4bd4
Author: Tim Thorpe <tt...@apache.org>
Authored: Tue Sep 27 08:21:56 2016 -0700
Committer: Tim Thorpe <tt...@apache.org>
Committed: Tue Sep 27 08:21:56 2016 -0700
----------------------------------------------------------------------
.../server/sample/checks/SampleServiceCheck.java | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/7b924342/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java b/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
index c91793e..1c16040 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/sample/checks/SampleServiceCheck.java
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.apache.ambari.server.sample.checks;
import org.apache.ambari.server.AmbariException;
[19/19] ambari git commit: Merge branch 'trunk' into
branch-dev-patch-upgrade
Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2d60c526
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2d60c526
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2d60c526
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 2d60c52674f33e0ecd1a11e9b4a24e16978c8c14
Parents: 75b656c 05e65e2
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Sep 28 13:27:48 2016 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Sep 28 13:27:48 2016 -0400
----------------------------------------------------------------------
.../libraries/functions/get_user_call_output.py | 13 +-
.../ambari-metrics/datasource.js | 64 +++
ambari-server/pom.xml | 55 ++
.../ambari/server/checks/CheckDescription.java | 5 +-
.../server/checks/UpgradeCheckRegistry.java | 76 +++
.../PreUpgradeCheckResourceProvider.java | 27 +-
.../server/stack/CommonServiceDirectory.java | 50 +-
.../ambari/server/stack/ServiceDirectory.java | 112 +++-
.../ambari/server/stack/ServiceModule.java | 8 +-
.../apache/ambari/server/stack/StackModule.java | 37 +-
.../server/stack/StackServiceDirectory.java | 81 +--
.../stack/UpdateActiveRepoVersionOnStartup.java | 9 +-
.../apache/ambari/server/state/ServiceInfo.java | 14 +
.../python/ambari_server/dbConfiguration.py | 52 +-
ambari-server/src/main/resources/alerts.json | 31 ++
.../HDF/grafana-kafka-home.json | 10 +-
.../HDF/grafana-storm-kafka-offset.json | 258 +++++++++
.../HDP/grafana-kafka-home.json | 10 +-
.../HDP/grafana-storm-kafka-offset.json | 258 +++++++++
.../2.1.0.2.0/package/scripts/params_linux.py | 7 +
.../YARN/2.1.0.2.0/package/scripts/yarn.py | 548 +++++++++----------
.../main/resources/host_scripts/alert_ulimit.py | 83 +++
.../scripts/shared_initialization.py | 2 +-
.../PreUpgradeCheckResourceProviderTest.java | 255 +++++++++
.../sample/checks/SampleServiceCheck.java | 52 ++
.../ambari/server/stack/ServiceModuleTest.java | 30 +
.../server/stack/StackManagerExtensionTest.java | 7 +
.../UpdateActiveRepoVersionOnStartupTest.java | 28 +-
.../src/test/python/TestAmbariServer.py | 107 ++--
.../test/python/host_scripts/TestAlertUlimit.py | 44 ++
.../stacks/2.0.6/YARN/test_historyserver.py | 1 -
.../stacks/2.0.6/YARN/test_resourcemanager.py | 54 +-
.../stacks/2.1/YARN/test_apptimelineserver.py | 40 +-
.../test/python/stacks/2.3/YARN/test_ats_1_5.py | 188 +++----
ambari-server/src/test/python/unitTests.py | 25 +-
ambari-web/app/assets/licenses/NOTICE.txt | 3 +
.../service/widgets/create/step2_controller.js | 26 +
ambari-web/app/messages.js | 13 +-
.../app/mixins/common/configs/configs_loader.js | 2 +-
ambari-web/app/models/cluster_states.js | 4 +-
ambari-web/app/routes/add_kerberos_routes.js | 4 +-
.../main/service/widgets/create/expression.hbs | 10 +-
.../main/service/widgets/create/step2_graph.hbs | 6 +-
.../service/widgets/create/step2_number.hbs | 6 +-
.../service/widgets/create/step2_template.hbs | 6 +-
ambari-web/app/utils/string_utils.js | 7 +-
.../views/common/configs/config_history_flow.js | 46 +-
.../app/views/common/rolling_restart_view.js | 19 +-
.../views/main/dashboard/widgets/hdfs_links.js | 2 +-
ambari-web/app/views/main/service/item.js | 3 +-
.../service/widgets/create/expression_view.js | 16 +-
ambari-web/brunch-config.js | 3 +-
.../resourceManager/wizard_controller_test.js | 1 -
.../common/configs/configs_loader_test.js | 10 +-
ambari-web/test/models/cluster_test.js | 12 +-
.../objects/service_config_property_test.js | 31 +-
.../configs/theme/sub_section_tab_test.js | 2 +-
.../test/views/main/host/log_metrics_test.js | 1 -
ambari-web/test/views/main/host_test.js | 4 +-
ambari-web/vendor/scripts/pluralize.js | 461 ++++++++++++++++
.../MICROSOFT_R/8.0.0/metainfo.xml | 4 +-
.../MICROSOFT_R/8.0.0/service_advisor.py | 22 +-
62 files changed, 2623 insertions(+), 742 deletions(-)
----------------------------------------------------------------------
[18/19] ambari git commit: AMBARI-18051 - Services should be able to
provide their own pre-req checks by supplying a jar file
Posted by nc...@apache.org.
AMBARI-18051 - Services should be able to provide their own pre-req checks by supplying a jar file
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/20ce57b7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/20ce57b7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/20ce57b7
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 20ce57b7bb578dd337007d4411c695c95d6bf287
Parents: aad2133
Author: Tim Thorpe <tt...@apache.org>
Authored: Wed Sep 28 09:28:06 2016 -0700
Committer: Tim Thorpe <tt...@apache.org>
Committed: Wed Sep 28 09:28:06 2016 -0700
----------------------------------------------------------------------
ambari-server/pom.xml | 23 +++++++++++++++++++++--
1 file changed, 21 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/20ce57b7/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index d507b82..e37accd 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -578,6 +578,25 @@
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <version>1.7</version>
+ <executions>
+ <execution>
+ <id>clean-sample-upgrade-check-jar</id>
+ <phase>process-test-classes</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <delete dir="target/test-classes/checks" includeemptydirs="true"/>
+ </target>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.2</version>
<executions>
@@ -603,9 +622,9 @@
<goal>run</goal>
</goals>
<configuration>
- <tasks>
+ <target>
<mkdir dir="target/test-classes/extensions/EXT/0.1/services/OOZIE2/checks/tmp"/>
- </tasks>
+ </target>
</configuration>
</execution>
</executions>
[05/19] ambari git commit: AMBARI-18468 - [Grafana] Incorrect metric
values displayed when there are multiple Kafka Brokers (prajwal)
Posted by nc...@apache.org.
AMBARI-18468 - [Grafana] Incorrect metric values displayed when there are multiple Kafka Brokers (prajwal)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5af6d547
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5af6d547
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5af6d547
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 5af6d547fa9c5d225e011e18297b2cef703237a9
Parents: 7c8ada1
Author: Prajwal Rao <pr...@gmail.com>
Authored: Mon Sep 26 14:12:46 2016 -0700
Committer: Prajwal Rao <pr...@gmail.com>
Committed: Mon Sep 26 14:12:46 2016 -0700
----------------------------------------------------------------------
.../files/grafana-dashboards/HDF/grafana-kafka-home.json | 10 +++++-----
.../files/grafana-dashboards/HDP/grafana-kafka-home.json | 10 +++++-----
2 files changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/5af6d547/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json
index b754231..5ec4404 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDF/grafana-kafka-home.json
@@ -233,7 +233,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "Active Controller Count",
"app": "kafka_broker",
"downsampleAggregator": "avg",
@@ -291,7 +291,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "Replica MaxLag",
"app": "kafka_broker",
"downsampleAggregator": "avg",
@@ -349,7 +349,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "Leader Count",
"app": "kafka_broker",
"downsampleAggregator": "avg",
@@ -416,7 +416,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "UnderReplicatedPartitions",
"app": "kafka_broker",
"downsampleAggregator": "avg",
@@ -473,7 +473,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "OfflinePartitionsCount",
"app": "kafka_broker",
"downsampleAggregator": "avg",
http://git-wip-us.apache.org/repos/asf/ambari/blob/5af6d547/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-kafka-home.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-kafka-home.json b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-kafka-home.json
index b754231..5ec4404 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-kafka-home.json
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/files/grafana-dashboards/HDP/grafana-kafka-home.json
@@ -233,7 +233,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "Active Controller Count",
"app": "kafka_broker",
"downsampleAggregator": "avg",
@@ -291,7 +291,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "Replica MaxLag",
"app": "kafka_broker",
"downsampleAggregator": "avg",
@@ -349,7 +349,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "Leader Count",
"app": "kafka_broker",
"downsampleAggregator": "avg",
@@ -416,7 +416,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "UnderReplicatedPartitions",
"app": "kafka_broker",
"downsampleAggregator": "avg",
@@ -473,7 +473,7 @@
},
"targets": [
{
- "aggregator": "avg",
+ "aggregator": "sum",
"alias": "OfflinePartitionsCount",
"app": "kafka_broker",
"downsampleAggregator": "avg",