You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by dm...@apache.org on 2014/01/31 19:50:06 UTC
[4/4] git commit: AMBARI-4358. Add stack extension support for
pluggable services (dlysnichenko)
AMBARI-4358. Add stack extension support for pluggable services (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/37f11ebd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/37f11ebd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/37f11ebd
Branch: refs/heads/trunk
Commit: 37f11ebda45eeac550ecacb78a25b40cd563825f
Parents: f2146a4
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Wed Jan 22 18:09:38 2014 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Fri Jan 31 20:46:45 2014 +0200
----------------------------------------------------------------------
.../ambari_agent/CustomServiceOrchestrator.py | 9 +-
.../src/main/python/ambari_agent/FileCache.py | 28 +-
.../TestCustomServiceOrchestrator.py | 2 +-
.../test/python/ambari_agent/TestFileCache.py | 31 +-
.../ambari/server/agent/ExecutionCommand.java | 3 +-
.../ambari/server/agent/HeartbeatMonitor.java | 7 +-
.../server/api/services/AmbariMetaInfo.java | 24 +-
.../server/api/util/StackExtensionHelper.java | 247 +-
.../AmbariCustomCommandExecutionHelper.java | 25 +-
.../server/state/CommandScriptDefinition.java | 29 +
.../ambari/server/state/ComponentInfo.java | 25 +
.../server/state/CustomCommandDefinition.java | 27 +
.../apache/ambari/server/state/ServiceInfo.java | 25 +-
.../apache/ambari/server/state/StackInfo.java | 23 +-
.../server/api/services/AmbariMetaInfoTest.java | 191 +
.../api/util/StackExtensionHelperTest.java | 4 +-
.../AmbariManagementControllerTest.java | 2 +-
.../python/stacks/1.3.3/configs/default.json | 2 +-
.../python/stacks/1.3.3/configs/secured.json | 2 +-
.../python/stacks/2.1.1/configs/default.json | 2 +-
.../python/stacks/2.1.1/configs/secured.json | 2 +-
.../stacks/HDP/2.0.6/hooks/dummy-script.py | 21 +
.../resources/stacks/HDP/2.0.7/metainfo.xml | 2 +-
.../services/HBASE/package/dummy-script.py | 21 +
.../HDP/2.0.7/services/HBASE/scripts/hbase.py | 19 -
.../services/HBASE/scripts/hbase_client.py | 19 -
.../services/HBASE/scripts/hbase_master.py | 19 -
.../HBASE/scripts/hbase_regionserver.py | 19 -
.../stacks/HDP/2.0.7/services/HDFS/metainfo.xml | 61 +-
.../2.0.7/services/HDFS/package/dummy-script.py | 21 +
.../HDFS/package/files/checkForFormat.sh | 62 -
.../services/HDFS/package/files/checkWebUI.py | 53 -
.../services/HDFS/package/scripts/datanode.py | 57 -
.../HDFS/package/scripts/hdfs_client.py | 49 -
.../HDFS/package/scripts/hdfs_datanode.py | 56 -
.../HDFS/package/scripts/hdfs_namenode.py | 180 -
.../HDFS/package/scripts/hdfs_snamenode.py | 53 -
.../HDFS/package/scripts/journalnode.py | 74 -
.../services/HDFS/package/scripts/namenode.py | 61 -
.../services/HDFS/package/scripts/params.py | 180 -
.../HDFS/package/scripts/service_check.py | 107 -
.../services/HDFS/package/scripts/snamenode.py | 64 -
.../HDFS/package/scripts/status_params.py | 31 -
.../services/HDFS/package/scripts/utils.py | 138 -
.../services/HDFS/package/scripts/zkfc_slave.py | 62 -
.../stacks/HDP/2.0.8/hooks/dummy-script.py | 21 +
.../resources/stacks/HDP/2.0.8/metainfo.xml | 24 +
.../stacks/HDP/2.0.8/repos/repoinfo.xml | 61 +
.../stacks/HDP/2.0.8/role_command_order.json | 100 +
.../HDP/2.0.8/services/HBASE/metainfo.xml | 81 +
.../stacks/HDP/2.0.8/services/HDFS/metainfo.xml | 147 +
.../stacks/HDP/2.0.8/services/HDFS/metrics.json | 7800 ++++++++++++++++++
.../2.0.8/services/HDFS/package/dummy-script.py | 21 +
53 files changed, 8970 insertions(+), 1424 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index 7ffc1c9..95ad2cd 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -98,12 +98,9 @@ class CustomServiceOrchestrator():
else:
if command_name == self.CUSTOM_COMMAND_COMMAND:
command_name = command['hostLevelParams']['custom_command']
- stack_name = command['hostLevelParams']['stack_name']
- stack_version = command['hostLevelParams']['stack_version']
- hook_dir = self.file_cache.get_hook_base_dir(stack_name, stack_version)
- metadata_folder = command['commandParams']['service_metadata_folder']
- base_dir = self.file_cache.get_service_base_dir(
- stack_name, stack_version, metadata_folder, component_name)
+ hook_dir = self.file_cache.get_hook_base_dir(command)
+ service_subpath = command['commandParams']['service_package_folder']
+ base_dir = self.file_cache.get_service_base_dir(service_subpath)
script_path = self.resolve_script_path(base_dir, script, script_type)
script_tuple = (script_path, base_dir)
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-agent/src/main/python/ambari_agent/FileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/FileCache.py b/ambari-agent/src/main/python/ambari_agent/FileCache.py
index eafb592..01d2e52 100644
--- a/ambari-agent/src/main/python/ambari_agent/FileCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/FileCache.py
@@ -42,32 +42,30 @@ class FileCache():
self.cache_dir = config.get('agent', 'cache_dir')
- def get_service_base_dir(self, stack_name, stack_version, service, component):
+ def get_service_base_dir(self, service_subpath):
"""
Returns a base directory for service
"""
- metadata_path = os.path.join(self.cache_dir, "stacks", str(stack_name),
- str(stack_version), "services", str(service),
- "package")
- if not os.path.isdir(metadata_path):
+ service_base_dir = os.path.join(self.cache_dir, "stacks", service_subpath)
+ if not os.path.isdir(service_base_dir):
# TODO: Metadata downloading will be implemented at Phase 2
# As of now, all stack definitions are packaged and distributed with
# agent rpm
- message = "Metadata dir for not found for a service " \
- "(stackName = {0}, stackVersion = {1}, " \
- "service = {2}, " \
- "component = {3}".format(stack_name, stack_version,
- service, component)
+ message = "Service base dir not found at expected location {0}".\
+ format(service_base_dir)
raise AgentException(message)
- return metadata_path
+ return service_base_dir
- def get_hook_base_dir(self, stack_name, stack_version):
+ def get_hook_base_dir(self, command):
"""
- Returns a base directory for service
+ Returns a base directory for hooks
"""
- hook_base_path = os.path.join(self.cache_dir, "stacks", str(stack_name),
- str(stack_version), "hooks")
+ try:
+ hooks_subpath = command['commandParams']['hooks_folder']
+ except KeyError:
+ return None
+ hook_base_path = os.path.join(self.cache_dir, "stacks", hooks_subpath)
if not os.path.isdir(hook_base_path):
# TODO: Metadata downloading will be implemented at Phase 2
# As of now, all stack definitions are packaged and distributed with
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
index dadc793..971048b 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
@@ -149,7 +149,7 @@ class TestCustomServiceOrchestrator(TestCase):
'script_type': 'PYTHON',
'script': 'scripts/hbase_regionserver.py',
'command_timeout': '600',
- 'service_metadata_folder' : 'HBASE'
+ 'service_package_folder' : 'HBASE'
},
'taskId' : '3',
'roleCommand': 'INSTALL'
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
index ae84268..5e389d5 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
@@ -58,15 +58,14 @@ class TestFileCache(TestCase):
fileCache = FileCache(self.config)
# Check existing dir case
isdir_mock.return_value = True
- base = fileCache.get_service_base_dir("HDP", "2.0.7",
- "HBASE", "REGION_SERVER")
- self.assertEqual(base, "/var/lib/ambari-agent/cache/stacks/HDP/2.0.7/"
- "services/HBASE/package")
+ service_subpath = "HDP/2.1.1/services/ZOOKEEPER/package"
+ base = fileCache.get_service_base_dir(service_subpath)
+ self.assertEqual(base, "/var/lib/ambari-agent/cache/stacks/HDP/2.1.1/"
+ "services/ZOOKEEPER/package")
# Check absent dir case
isdir_mock.return_value = False
try:
- fileCache.get_service_base_dir("HDP", "2.0.7",
- "HBASE", "REGION_SERVER")
+ fileCache.get_service_base_dir(service_subpath)
self.fail("Should throw an exception")
except AgentException:
pass # Expected
@@ -77,14 +76,28 @@ class TestFileCache(TestCase):
@patch("os.path.isdir")
def test_get_hook_base_dir(self, isdir_mock):
fileCache = FileCache(self.config)
+ # Check missing parameter
+ command = {
+ 'commandParams' : {
+ }
+ }
+ base = fileCache.get_hook_base_dir(command)
+ self.assertEqual(base, None)
+
# Check existing dir case
isdir_mock.return_value = True
- base = fileCache.get_hook_base_dir("HDP", "2.0.7")
- self.assertEqual(base, "/var/lib/ambari-agent/cache/stacks/HDP/2.0.7/hooks")
+ command = {
+ 'commandParams' : {
+ 'hooks_folder' : 'HDP/2.1.1/hooks'
+ }
+ }
+ base = fileCache.get_hook_base_dir(command)
+ self.assertEqual(base, "/var/lib/ambari-agent/cache/stacks/HDP/2.1.1/hooks")
+
# Check absent dir case
isdir_mock.return_value = False
try:
- fileCache.get_hook_base_dir("HDP", "2.0.7")
+ fileCache.get_hook_base_dir(command)
self.fail("Should throw an exception")
except AgentException:
pass # Expected
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index c430c13..3ce7da2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -239,7 +239,8 @@ public class ExecutionCommand extends AgentCommand {
String COMMAND_TIMEOUT = "command_timeout";
String SCRIPT = "script";
String SCRIPT_TYPE = "script_type";
- String SERVICE_METADATA_FOLDER = "service_metadata_folder";
+ String SERVICE_PACKAGE_FOLDER = "service_package_folder";
+ String HOOKS_FOLDER = "hooks_folder";
String STACK_NAME = "stack_name";
String STACK_VERSION = "stack_version";
String SERVICE_REPO_INFO = "service_repo_info";
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index 6616b4f..2babd6b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -198,6 +198,8 @@ public class HeartbeatMonitor implements Runnable {
ComponentInfo componentInfo = ambariMetaInfo.getComponent(
stackId.getStackName(), stackId.getStackVersion(),
serviceName, componentName);
+ StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ stackId.getStackVersion());
Map<String, Map<String, String>> configurations = new TreeMap<String, Map<String, String>>();
@@ -260,8 +262,9 @@ public class HeartbeatMonitor implements Runnable {
}
}
commandParams.put(COMMAND_TIMEOUT, commandTimeout);
- commandParams.put(SERVICE_METADATA_FOLDER,
- serviceInfo.getServiceMetadataFolder());
+ commandParams.put(SERVICE_PACKAGE_FOLDER,
+ serviceInfo.getServicePackageFolder());
+ commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
// Fill host level params
Map<String, String> hostLevelParams = statusCmd.getHostLevelParams();
hostLevelParams.put(STACK_NAME, stackId.getStackName());
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index 78131f2..8e1e787 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -93,7 +93,7 @@ public class AmbariMetaInfo {
@Override
public boolean accept(File dir, String s) {
if (s.equals(".svn") || s.equals(".git") ||
- s.equals(HOOKS_DIR)) // Hooks dir is not a service
+ s.equals(StackExtensionHelper.HOOKS_FOLDER_NAME)) // Hooks dir is not a service
{
return false;
}
@@ -108,7 +108,6 @@ public class AmbariMetaInfo {
private static final List<String> ALL_SUPPORTED_OS = Arrays.asList(
"centos5", "redhat5", "centos6", "redhat6", "oraclelinux5",
"oraclelinux6", "suse11", "sles11", "ubuntu12");
- private final static String HOOKS_DIR = "hooks";
private final ActionDefinitionManager adManager = new ActionDefinitionManager();
private String serverVersion = "undefined";
private List<StackInfo> stacksResult = new ArrayList<StackInfo>();
@@ -657,15 +656,16 @@ public class AmbariMetaInfo {
}
private void getConfigurationInformation(File stackRoot) throws Exception {
+ String stackRootAbsPath = stackRoot.getAbsolutePath();
if (LOG.isDebugEnabled()) {
LOG.debug("Loading stack information"
- + ", stackRoot = " + stackRoot.getAbsolutePath());
+ + ", stackRoot = " + stackRootAbsPath);
}
if (!stackRoot.isDirectory() && !stackRoot.exists())
throw new IOException("" + Configuration.METADETA_DIR_PATH
+ " should be a directory with stack"
- + ", stackRoot = " + stackRoot.getAbsolutePath());
+ + ", stackRoot = " + stackRootAbsPath);
StackExtensionHelper stackExtensionHelper = new StackExtensionHelper
(stackRoot);
@@ -674,7 +674,7 @@ public class AmbariMetaInfo {
List<StackInfo> stacks = stackExtensionHelper.getAllAvailableStacks();
if (stacks.isEmpty()) {
throw new AmbariException("Unable to find stack definitions under " +
- "stackRoot = " + stackRoot.getAbsolutePath());
+ "stackRoot = " + stackRootAbsPath);
}
for (StackInfo stack : stacks) {
@@ -684,9 +684,11 @@ public class AmbariMetaInfo {
stacksResult.add(stack);
+ String stackPath = stackRootAbsPath + File.separator +
+ stack.getName() + File.separator + stack.getVersion();
+
// get repository data for current stack of techs
- File repositoryFolder = new File(stackRoot.getAbsolutePath()
- + File.separator + stack.getName() + File.separator + stack.getVersion()
+ File repositoryFolder = new File(stackPath
+ File.separator + REPOSITORY_FOLDER_NAME + File.separator
+ REPOSITORY_FILE_NAME);
@@ -707,13 +709,19 @@ public class AmbariMetaInfo {
+ ", repoFolder=" + repositoryFolder.getPath());
}
+ // Populate services
List<ServiceInfo> services = stackExtensionHelper
.getAllApplicableServices(stack);
-
stack.setServices(services);
+
+ // Resolve hooks folder
+ String stackHooksToUse = stackExtensionHelper.
+ resolveHooksFolder(stack);
+ stack.setStackHooksFolder(stackHooksToUse);
}
}
+
public String getServerVersion() {
return serverVersion;
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
index 0883ad4..13e1dd2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
@@ -50,6 +50,14 @@ import org.xml.sax.SAXException;
/**
* Helper methods for providing stack extension behavior -
* Apache Jira: AMBARI-2819
+ *
+ * Stack extension processing is done in two steps. At first step, we parse
+ * all information for every stack from stack files. At second step, we
+ * go through parent and perform inheritance where needed. At both steps,
+ * stacks are processed at random order, that's why extension implementation
+ * for any new stack/service/component property should also consist of two
+ * separate steps (otherwise child may happen to be processed before parent's
+ * properties are populated).
*/
public class StackExtensionHelper {
private File stackRoot;
@@ -58,6 +66,8 @@ public class StackExtensionHelper {
private final Map<String, StackInfo> stackVersionMap = new HashMap<String,
StackInfo>();
private Map<String, List<StackInfo>> stackParentsMap = null;
+ public final static String HOOKS_FOLDER_NAME = "hooks";
+ private static final String PACKAGE_FOLDER_NAME = "package";
private static final Map<Class<?>, JAXBContext> _jaxbContexts =
new HashMap<Class<?>, JAXBContext> ();
@@ -111,39 +121,50 @@ public class StackExtensionHelper {
private ServiceInfo mergeServices(ServiceInfo parentService,
ServiceInfo childService) {
- // TODO: Allow extending stack with custom services
ServiceInfo mergedServiceInfo = new ServiceInfo();
mergedServiceInfo.setName(childService.getName());
mergedServiceInfo.setComment(childService.getComment());
mergedServiceInfo.setUser(childService.getUser());
mergedServiceInfo.setVersion(childService.getVersion());
mergedServiceInfo.setConfigDependencies(childService.getConfigDependencies());
+
+ Map<String, ServiceOsSpecific> osSpecific = childService.getOsSpecifics();
+ if (! osSpecific.isEmpty()) {
+ mergedServiceInfo.setOsSpecifics(childService.getOsSpecifics());
+ } else {
+ mergedServiceInfo.setOsSpecifics(parentService.getOsSpecifics());
+ }
+
+ CommandScriptDefinition commandScript = childService.getCommandScript();
+ if (commandScript != null) {
+ mergedServiceInfo.setCommandScript(childService.getCommandScript());
+ } else {
+ mergedServiceInfo.setCommandScript(parentService.getCommandScript());
+ }
+
+ String servicePackageFolder = childService.getServicePackageFolder();
+ if (servicePackageFolder != null) {
+ mergedServiceInfo.setServicePackageFolder(servicePackageFolder);
+ } else {
+ mergedServiceInfo.setServicePackageFolder(
+ parentService.getServicePackageFolder());
+ }
+
+ // Merge custom command definitions for service
+ List<CustomCommandDefinition> mergedCustomCommands =
+ mergeCustomCommandLists(parentService.getCustomCommands(),
+ childService.getCustomCommands());
+ mergedServiceInfo.setCustomCommands(mergedCustomCommands);
// metrics
if (null == childService.getMetricsFile() && null != parentService.getMetricsFile())
mergedServiceInfo.setMetricsFile(parentService.getMetricsFile());
-
- // Add all child components to service
+
+ populateComponents(mergedServiceInfo, parentService, childService);
+
+ // Add child properties not deleted
List<String> deleteList = new ArrayList<String>();
List<String> appendList = new ArrayList<String>();
- for (ComponentInfo childComponentInfo : childService.getComponents()) {
- if (!childComponentInfo.isDeleted()) {
- mergedServiceInfo.getComponents().add(childComponentInfo);
- appendList.add(childComponentInfo.getName());
- } else {
- deleteList.add(childComponentInfo.getName());
- }
- }
- // Add remaining parent components
- for (ComponentInfo parentComponent : parentService.getComponents()) {
- if (!deleteList.contains(parentComponent.getName()) && !appendList
- .contains(parentComponent.getName())) {
- mergedServiceInfo.getComponents().add(parentComponent);
- }
- }
- // Add child properties not deleted
- deleteList = new ArrayList<String>();
- appendList = new ArrayList<String>();
for (PropertyInfo propertyInfo : childService.getProperties()) {
if (!propertyInfo.isDeleted()) {
mergedServiceInfo.getProperties().add(propertyInfo);
@@ -170,7 +191,92 @@ public class StackExtensionHelper {
}
return mergedServiceInfo;
}
-
+
+
+ /**
+ * Merges component sets of parentService and childService and writes result
+ * to mergedServiceInfo
+ */
+ private void populateComponents(ServiceInfo mergedServiceInfo, ServiceInfo parentService,
+ ServiceInfo childService) {
+ // Add all child components to service
+ List<String> deleteList = new ArrayList<String>();
+ List<String> appendList = new ArrayList<String>();
+
+ for (ComponentInfo childComponent : childService.getComponents()) {
+ if (!childComponent.isDeleted()) {
+ ComponentInfo parentComponent = getComponent(parentService,
+ childComponent.getName());
+ if (parentComponent != null) { // If parent has similar component
+ ComponentInfo mergedComponent = mergeComponents(parentComponent,
+ childComponent);
+ mergedServiceInfo.getComponents().add(mergedComponent);
+ appendList.add(mergedComponent.getName());
+ } else {
+ mergedServiceInfo.getComponents().add(childComponent);
+ appendList.add(childComponent.getName());
+ }
+ } else {
+ deleteList.add(childComponent.getName());
+ }
+ }
+ // Add remaining parent components
+ for (ComponentInfo parentComponent : parentService.getComponents()) {
+ if (!deleteList.contains(parentComponent.getName()) && !appendList
+ .contains(parentComponent.getName())) {
+ mergedServiceInfo.getComponents().add(parentComponent);
+ }
+ }
+ }
+
+
+ private ComponentInfo getComponent(ServiceInfo service, String componentName) {
+ for (ComponentInfo component : service.getComponents()) {
+ if (component.getName().equals(componentName)) {
+ return component;
+ }
+ }
+ return null;
+ }
+
+
+ private ComponentInfo mergeComponents(ComponentInfo parent, ComponentInfo child) {
+ ComponentInfo result = new ComponentInfo(child); // cloning child
+ CommandScriptDefinition commandScript = child.getCommandScript();
+ if (commandScript != null) {
+ result.setCommandScript(child.getCommandScript());
+ } else {
+ result.setCommandScript(parent.getCommandScript());
+ }
+
+ // Merge custom command definitions for service
+ List<CustomCommandDefinition> mergedCustomCommands =
+ mergeCustomCommandLists(parent.getCustomCommands(),
+ child.getCustomCommands());
+ result.setCustomCommands(mergedCustomCommands);
+
+ return result;
+ }
+
+
+ private List<CustomCommandDefinition> mergeCustomCommandLists(
+ List<CustomCommandDefinition> parentList,
+ List<CustomCommandDefinition> childList) {
+ List<CustomCommandDefinition> mergedList =
+ new ArrayList<CustomCommandDefinition>(childList);
+ List<String> existingNames = new ArrayList<String>();
+ for (CustomCommandDefinition childCCD : childList) {
+ existingNames.add(childCCD.getName());
+ }
+ for (CustomCommandDefinition parentsCCD : parentList) {
+ if (! existingNames.contains(parentsCCD.getName())) {
+ mergedList.add(parentsCCD);
+ existingNames.add(parentsCCD.getName());
+ }
+ }
+ return mergedList;
+ }
+
public List<ServiceInfo> getAllApplicableServices(StackInfo stackInfo) {
LinkedList<StackInfo> parents = (LinkedList<StackInfo>)
@@ -208,6 +314,37 @@ public class StackExtensionHelper {
return new ArrayList<ServiceInfo>(serviceInfoMap.values());
}
+
+ /**
+ * Determines exact hooks folder (subpath from stackRoot to hooks directory)
+ * to use for a given stack. If the given stack
+ * does not have a hooks folder, the inheritance hierarchy is queried.
+ * @param stackInfo stack to work with
+ */
+ public String resolveHooksFolder(StackInfo stackInfo) throws AmbariException {
+ // Determine hooks folder for stack
+ String stackId = String.format("%s-%s",
+ stackInfo.getName(), stackInfo.getVersion());
+ String hooksFolder = stackInfo.getStackHooksFolder();
+ if (hooksFolder == null) {
+ // Try to get parent's
+ List<StackInfo> parents = getParents(stackInfo);
+ for (StackInfo parent : parents) {
+ hooksFolder = parent.getStackHooksFolder();
+ if (hooksFolder != null) {
+ break;
+ }
+ }
+ }
+ if (hooksFolder == null) {
+ String message = String.format(
+ "Can not determine hooks dir for stack %s",
+ stackId);
+ LOG.debug(message);
+ }
+ return hooksFolder;
+ }
+
void populateServicesForStack(StackInfo stackInfo) throws
ParserConfigurationException, SAXException,
XPathExpressionException, IOException, JAXBException {
@@ -218,7 +355,6 @@ public class StackExtensionHelper {
if (!servicesFolder.exists()) {
LOG.info("No services defined for stack: " + stackInfo.getName() +
"-" + stackInfo.getVersion());
-
} else {
try {
File[] servicesFolders = servicesFolder.listFiles(AmbariMetaInfo
@@ -267,9 +403,14 @@ public class StackExtensionHelper {
List<ServiceInfo> serviceInfos = smiv2x.getServices();
for (ServiceInfo serviceInfo : serviceInfos) {
serviceInfo.setSchemaVersion(AmbariMetaInfo.SCHEMA_VERSION_2);
- serviceInfo.setServiceMetadataFolder(serviceFolder.getName());
- // TODO: allow repository overriding when extending stack
+ // Find service package folder
+ String servicePackageDir = resolveServicePackageFolder(
+ stackRoot.getAbsolutePath(), stackInfo,
+ serviceFolder.getName(), serviceInfo.getName());
+ serviceInfo.setServicePackageFolder(servicePackageDir);
+
+ // process metrics.json
if (metricsJson.exists())
serviceInfo.setMetricsFile(metricsJson);
@@ -290,10 +431,49 @@ public class StackExtensionHelper {
}
+ /**
+ * Determines exact service directory that contains scripts and templates
+ * for a service. If the given stack does not have this folder, the
+ * inheritance hierarchy is queried.
+ */
+ String resolveServicePackageFolder(String stackRoot,
+ StackInfo stackInfo, String serviceFolderName,
+ String serviceName) throws AmbariException {
+ String stackId = String.format("%s-%s",
+ stackInfo.getName(), stackInfo.getVersion());
+ String expectedSubPath = stackInfo.getName() + File.separator +
+ stackInfo.getVersion() + File.separator +
+ AmbariMetaInfo.SERVICES_FOLDER_NAME +
+ File.separator + serviceFolderName + File.separator +
+ PACKAGE_FOLDER_NAME;
+ File packageDir = new File(stackRoot + File.separator + expectedSubPath);
+ String servicePackageFolder = null;
+ if (packageDir.isDirectory()) {
+ servicePackageFolder = expectedSubPath;
+ String message = String.format(
+ "Service package folder for service %s" +
+ "for stack %s has been resolved to %s",
+ serviceName, stackId, servicePackageFolder);
+ LOG.debug(message);
+ } else {
+ String message = String.format(
+ "Service package folder %s for service %s " +
+ "for stack %s does not exist.",
+ packageDir.getAbsolutePath(), serviceName, stackId);
+ LOG.debug(message);
+ }
+ return servicePackageFolder;
+ }
+
+
public List<StackInfo> getAllAvailableStacks() {
return new ArrayList<StackInfo>(stackVersionMap.values());
}
+ public List<StackInfo> getParents(StackInfo stackInfo) {
+ return stackParentsMap.get(stackInfo.getVersion());
+ }
+
private Map<String, List<StackInfo>> getParentStacksInOrder(
Collection<StackInfo> stacks) {
Map<String, List<StackInfo>> parentStacksMap = new HashMap<String,
@@ -372,7 +552,22 @@ public class StackExtensionHelper {
stackInfo.setMinUpgradeVersion(smx.getVersion().getUpgrade());
stackInfo.setActive(smx.getVersion().isActive());
stackInfo.setParentStackVersion(smx.getExtends());
- String rcoFileLocation = stackVersionFolder.getAbsolutePath() + File.separator + AmbariMetaInfo.RCO_FILE_NAME;
+
+ // Populating hooks dir for stack
+ String hooksSubPath = stackInfo.getName() + File.separator +
+ stackInfo.getVersion() + File.separator + HOOKS_FOLDER_NAME;
+ String hooksAbsPath = stackVersionFolder.getAbsolutePath() +
+ File.separator + HOOKS_FOLDER_NAME;
+ if (new File(hooksAbsPath).exists()) {
+ stackInfo.setStackHooksFolder(hooksSubPath);
+ } else {
+ String message = String.format("Hooks folder %s does not exist",
+ hooksAbsPath);
+ LOG.debug(message);
+ }
+
+ String rcoFileLocation = stackVersionFolder.getAbsolutePath() +
+ File.separator + AmbariMetaInfo.RCO_FILE_NAME;
if (new File(rcoFileLocation).exists())
stackInfo.setRcoFileLocation(rcoFileLocation);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 72fc31d..55022cf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -45,6 +45,7 @@ import org.apache.ambari.server.state.ServiceComponentHostEvent;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.ServiceOsSpecific;
import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.state.State;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
import org.apache.ambari.server.utils.StageUtils;
@@ -64,6 +65,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_T
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_COMMAND;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
@@ -75,7 +77,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_METADATA_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
@@ -221,6 +223,8 @@ public class AmbariCustomCommandExecutionHelper {
ServiceInfo serviceInfo =
ambariMetaInfo.getServiceInfo(stackId.getStackName(),
stackId.getStackVersion(), serviceName);
+ StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ stackId.getStackVersion());
long nowTimestamp = System.currentTimeMillis();
@@ -281,8 +285,9 @@ public class AmbariCustomCommandExecutionHelper {
}
commandParams.put(COMMAND_TIMEOUT, commandTimeout);
- commandParams.put(SERVICE_METADATA_FOLDER,
- serviceInfo.getServiceMetadataFolder());
+ commandParams.put(SERVICE_PACKAGE_FOLDER,
+ serviceInfo.getServicePackageFolder());
+ commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
execCmd.setCommandParams(commandParams);
}
@@ -360,6 +365,8 @@ public class AmbariCustomCommandExecutionHelper {
ServiceInfo serviceInfo =
ambariMetaInfo.getServiceInfo(stackId.getStackName(),
stackId.getStackVersion(), serviceName);
+ StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ stackId.getStackVersion());
stage.addHostRoleExecutionCommand(hostname,
@@ -414,8 +421,9 @@ public class AmbariCustomCommandExecutionHelper {
}
commandParams.put(COMMAND_TIMEOUT, commandTimeout);
- commandParams.put(SERVICE_METADATA_FOLDER,
- serviceInfo.getServiceMetadataFolder());
+ commandParams.put(SERVICE_PACKAGE_FOLDER,
+ serviceInfo.getServicePackageFolder());
+ commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
execCmd.setCommandParams(commandParams);
@@ -570,6 +578,8 @@ public class AmbariCustomCommandExecutionHelper {
ComponentInfo componentInfo = ambariMetaInfo.getComponent(
stackId.getStackName(), stackId.getStackVersion(),
serviceName, componentName);
+ StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+ stackId.getStackVersion());
ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
scHost.getServiceComponentName()).getExecutionCommand();
@@ -612,8 +622,9 @@ public class AmbariCustomCommandExecutionHelper {
}
}
commandParams.put(COMMAND_TIMEOUT, commandTimeout);
- commandParams.put(SERVICE_METADATA_FOLDER,
- serviceInfo.getServiceMetadataFolder());
+ commandParams.put(SERVICE_PACKAGE_FOLDER,
+ serviceInfo.getServicePackageFolder());
+ commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
execCmd.setCommandParams(commandParams);
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/state/CommandScriptDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/CommandScriptDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/state/CommandScriptDefinition.java
index 3394ecd..56e7438 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/CommandScriptDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/CommandScriptDefinition.java
@@ -19,6 +19,9 @@
package org.apache.ambari.server.state;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
+
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
@@ -60,4 +63,30 @@ public class CommandScriptDefinition {
PUPPET // TODO: Not supported yet. Do we really need it?
}
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (obj == this) {
+ return true;
+ }
+ if (! (obj instanceof CommandScriptDefinition)) {
+ return false;
+ }
+
+ CommandScriptDefinition rhs = (CommandScriptDefinition) obj;
+ return new EqualsBuilder().
+ append(script, rhs.script).
+ append(scriptType, rhs.scriptType).
+ append(timeout, rhs.timeout).isEquals();
+ }
+
+ @Override
+ public int hashCode() {
+ return new HashCodeBuilder(17, 31).
+ append(script).
+ append(scriptType).
+ append(timeout).toHashCode();
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
index 8798ef1..f03bd8c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
@@ -63,6 +63,23 @@ public class ComponentInfo {
@XmlElement(name="auto-deploy")
private AutoDeployInfo m_autoDeploy;
+ public ComponentInfo() {
+ }
+
+ /**
+ * Copy constructor.
+ */
+ public ComponentInfo(ComponentInfo prototype) {
+ name = prototype.name;
+ category = prototype.category;
+ deleted = prototype.deleted;
+ cardinality = prototype.cardinality;
+ commandScript = prototype.commandScript;
+ customCommands = prototype.customCommands;
+ dependencies = prototype.dependencies;
+ m_autoDeploy = prototype.m_autoDeploy;
+ }
+
public String getName() {
return name;
}
@@ -103,6 +120,10 @@ public class ComponentInfo {
return commandScript;
}
+ public void setCommandScript(CommandScriptDefinition commandScript) {
+ this.commandScript = commandScript;
+ }
+
public List<CustomCommandDefinition> getCustomCommands() {
if (customCommands == null) {
customCommands = new ArrayList<CustomCommandDefinition>();
@@ -110,6 +131,10 @@ public class ComponentInfo {
return customCommands;
}
+ public void setCustomCommands(List<CustomCommandDefinition> customCommands) {
+ this.customCommands = customCommands;
+ }
+
public boolean isCustomCommand(String commandName) {
if (customCommands != null && commandName != null) {
for (CustomCommandDefinition cc: customCommands) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/state/CustomCommandDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/CustomCommandDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/state/CustomCommandDefinition.java
index ab4143f..a26e7be 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/CustomCommandDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/CustomCommandDefinition.java
@@ -17,6 +17,9 @@
*/
package org.apache.ambari.server.state;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
+
import javax.xml.bind.annotation.*;
/**
@@ -36,4 +39,28 @@ public class CustomCommandDefinition {
return commandScript;
}
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (obj == this) {
+ return true;
+ }
+ if (! (obj instanceof CustomCommandDefinition)) {
+ return false;
+ }
+
+ CustomCommandDefinition rhs = (CustomCommandDefinition) obj;
+ return new EqualsBuilder().
+ append(name, rhs.name).
+ append(commandScript, rhs.commandScript).isEquals();
+ }
+
+ @Override
+ public int hashCode() {
+ return new HashCodeBuilder(17, 31).
+ append(name).
+ append(commandScript).toHashCode();
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
index c45531f..c12363c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ServiceInfo.java
@@ -95,12 +95,13 @@ public class ServiceInfo {
/**
- * Directory, that contains service metadata. Since schema ver 2,
+ * Meaning: stores subpath from stack root to exact directory, that contains
+ * service scripts and templates. Since schema ver 2,
* we may have multiple service metadata inside folder.
* Added at schema ver 2
*/
@XmlTransient
- private String serviceMetadataFolder;
+ private String servicePackageFolder;
public boolean isDeleted() {
return isDeleted;
@@ -286,12 +287,12 @@ public class ServiceInfo {
}
- public String getServiceMetadataFolder() {
- return serviceMetadataFolder;
+ public String getServicePackageFolder() {
+ return servicePackageFolder;
}
- public void setServiceMetadataFolder(String serviceMetadataFolder) {
- this.serviceMetadataFolder = serviceMetadataFolder;
+ public void setServicePackageFolder(String servicePackageFolder) {
+ this.servicePackageFolder = servicePackageFolder;
}
/**
@@ -316,6 +317,10 @@ public class ServiceInfo {
return serviceOsSpecificsMap;
}
+ public void setOsSpecifics(Map<String, ServiceOsSpecific> serviceOsSpecificsMap) {
+ this.serviceOsSpecificsMap = serviceOsSpecificsMap;
+ }
+
public List<CustomCommandDefinition> getCustomCommands() {
if (customCommands == null) {
customCommands = new ArrayList<CustomCommandDefinition>();
@@ -323,10 +328,18 @@ public class ServiceInfo {
return customCommands;
}
+ public void setCustomCommands(List<CustomCommandDefinition> customCommands) {
+ this.customCommands = customCommands;
+ }
+
public CommandScriptDefinition getCommandScript() {
return commandScript;
}
+ public void setCommandScript(CommandScriptDefinition commandScript) {
+ this.commandScript = commandScript;
+ }
+
/**
* @param file the file containing the metrics definitions
*/
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
index 6affad9..cc1a45e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
@@ -23,7 +23,7 @@ import java.util.List;
import org.apache.ambari.server.controller.StackVersionResponse;
-public class StackInfo {
+public class StackInfo implements Comparable<StackInfo>{
private String name;
private String version;
private String minUpgradeVersion;
@@ -33,6 +33,12 @@ public class StackInfo {
private List<ServiceInfo> services;
private String parentStackVersion;
+ /**
+ * Meaning: stores subpath from stack root to exact hooks folder for stack. These hooks are
+ * applied to all commands for services in current stack.
+ */
+ private String stackHooksFolder;
+
public String getName() {
return name;
}
@@ -145,4 +151,19 @@ public class StackInfo {
public void setRcoFileLocation(String rcoFileLocation) {
this.rcoFileLocation = rcoFileLocation;
}
+
+ public String getStackHooksFolder() {
+ return stackHooksFolder;
+ }
+
+ public void setStackHooksFolder(String stackHooksFolder) {
+ this.stackHooksFolder = stackHooksFolder;
+ }
+
+ @Override
+ public int compareTo(StackInfo o) {
+ String myId = name + "-" + version;
+ String oId = o.name + "-" + o.version;
+ return myId.compareTo(oId);
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index 3a348e7..2a4310b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -27,6 +27,7 @@ import static org.junit.Assert.fail;
import java.io.File;
import java.lang.reflect.Method;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedList;
@@ -1058,4 +1059,194 @@ public class AmbariMetaInfoTest {
}
}
}
+
+
+ @Test
+ public void testHooksDirInheritance() throws Exception {
+ // Test hook dir determination in parent
+ StackInfo stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, "2.0.6");
+ Assert.assertEquals("HDP/2.0.6/hooks", stackInfo.getStackHooksFolder());
+ // Test hook dir inheritance
+ stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, "2.0.7");
+ Assert.assertEquals("HDP/2.0.6/hooks", stackInfo.getStackHooksFolder());
+ // Test hook dir override
+ stackInfo = metaInfo.getStackInfo(STACK_NAME_HDP, "2.0.8");
+ Assert.assertEquals("HDP/2.0.8/hooks", stackInfo.getStackHooksFolder());
+ }
+
+
+ @Test
+ public void testServicePackageDirInheritance() throws Exception {
+ // Test service package dir determination in parent
+ ServiceInfo service = metaInfo.getService(STACK_NAME_HDP, "2.0.7", "HBASE");
+ Assert.assertEquals("HDP/2.0.7/services/HBASE/package",
+ service.getServicePackageFolder());
+
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.7", "HDFS");
+ Assert.assertEquals("HDP/2.0.7/services/HDFS/package",
+ service.getServicePackageFolder());
+ // Test service package dir inheritance
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.8", "HBASE");
+ Assert.assertEquals("HDP/2.0.7/services/HBASE/package",
+ service.getServicePackageFolder());
+ // Test service package dir override
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.8", "HDFS");
+ Assert.assertEquals("HDP/2.0.8/services/HDFS/package",
+ service.getServicePackageFolder());
+ }
+
+
+ @Test
+ public void testServiceCommandScriptInheritance() throws Exception {
+ // Test command script determination in parent
+ ServiceInfo service = metaInfo.getService(STACK_NAME_HDP, "2.0.7", "HDFS");
+ Assert.assertEquals("scripts/service_check_1.py",
+ service.getCommandScript().getScript());
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.7", "HBASE");
+ Assert.assertEquals("scripts/service_check.py",
+ service.getCommandScript().getScript());
+ // Test command script inheritance
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.8", "HBASE");
+ Assert.assertEquals("scripts/service_check.py",
+ service.getCommandScript().getScript());
+ // Test command script override
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.8", "HDFS");
+ Assert.assertEquals("scripts/service_check_2.py",
+ service.getCommandScript().getScript());
+ }
+
+ @Test
+ public void testComponentCommandScriptInheritance() throws Exception {
+ // Test command script determination in parent
+ ComponentInfo component = metaInfo.getComponent(STACK_NAME_HDP,
+ "2.0.7", "HDFS", "HDFS_CLIENT");
+ Assert.assertEquals("scripts/hdfs_client.py",
+ component.getCommandScript().getScript());
+ component = metaInfo.getComponent(STACK_NAME_HDP,
+ "2.0.7", "HBASE", "HBASE_MASTER");
+ Assert.assertEquals("scripts/hbase_master.py",
+ component.getCommandScript().getScript());
+ // Test command script inheritance
+ component = metaInfo.getComponent(STACK_NAME_HDP,
+ "2.0.8", "HBASE", "HBASE_MASTER");
+ Assert.assertEquals("scripts/hbase_master.py",
+ component.getCommandScript().getScript());
+ // Test command script override
+ component = metaInfo.getComponent(STACK_NAME_HDP,
+ "2.0.8", "HDFS", "HDFS_CLIENT");
+ Assert.assertEquals("scripts/hdfs_client_overridden.py",
+ component.getCommandScript().getScript());
+ }
+
+
+ @Test
+ public void testServiceCustomCommandScriptInheritance() throws Exception {
+ // Test custom command script determination in parent
+ ServiceInfo service = metaInfo.getService(STACK_NAME_HDP, "2.0.7", "HDFS");
+
+ CustomCommandDefinition ccd = findCustomCommand("RESTART", service);
+ Assert.assertEquals("scripts/restart_parent.py",
+ ccd.getCommandScript().getScript());
+
+ ccd = findCustomCommand("YET_ANOTHER_PARENT_SRV_COMMAND", service);
+ Assert.assertEquals("scripts/yet_another_parent_srv_command.py",
+ ccd.getCommandScript().getScript());
+
+ Assert.assertEquals(2, service.getCustomCommands().size());
+
+ // Test custom command script inheritance
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.8", "HDFS");
+ Assert.assertEquals(3, service.getCustomCommands().size());
+
+ ccd = findCustomCommand("YET_ANOTHER_PARENT_SRV_COMMAND", service);
+ Assert.assertEquals("scripts/yet_another_parent_srv_command.py",
+ ccd.getCommandScript().getScript());
+
+ // Test custom command script override
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.8", "HDFS");
+
+ ccd = findCustomCommand("RESTART", service);
+ Assert.assertEquals("scripts/restart_child.py",
+ ccd.getCommandScript().getScript());
+
+ ccd = findCustomCommand("YET_ANOTHER_CHILD_SRV_COMMAND", service);
+ Assert.assertEquals("scripts/yet_another_child_srv_command.py",
+ ccd.getCommandScript().getScript());
+ }
+
+
+ @Test
+ public void testChildCustomCommandScriptInheritance() throws Exception {
+ // Test custom command script determination in parent
+ ComponentInfo component = metaInfo.getComponent(STACK_NAME_HDP, "2.0.7",
+ "HDFS", "NAMENODE");
+
+ CustomCommandDefinition ccd = findCustomCommand("DECOMMISSION", component);
+ Assert.assertEquals("scripts/namenode_dec.py",
+ ccd.getCommandScript().getScript());
+
+ ccd = findCustomCommand("YET_ANOTHER_PARENT_COMMAND", component);
+ Assert.assertEquals("scripts/yet_another_parent_command.py",
+ ccd.getCommandScript().getScript());
+
+ Assert.assertEquals(2, component.getCustomCommands().size());
+
+ // Test custom command script inheritance
+ component = metaInfo.getComponent(STACK_NAME_HDP, "2.0.8",
+ "HDFS", "NAMENODE");
+ Assert.assertEquals(3, component.getCustomCommands().size());
+
+ ccd = findCustomCommand("YET_ANOTHER_PARENT_COMMAND", component);
+ Assert.assertEquals("scripts/yet_another_parent_command.py",
+ ccd.getCommandScript().getScript());
+
+ // Test custom command script override
+ ccd = findCustomCommand("DECOMMISSION", component);
+ Assert.assertEquals("scripts/namenode_dec_overr.py",
+ ccd.getCommandScript().getScript());
+
+ ccd = findCustomCommand("YET_ANOTHER_CHILD_COMMAND", component);
+ Assert.assertEquals("scripts/yet_another_child_command.py",
+ ccd.getCommandScript().getScript());
+ }
+
+
+ @Test
+ public void testServiceOsSpecificsInheritance() throws Exception {
+ // Test command script determination in parent
+ ServiceInfo service = metaInfo.getService(STACK_NAME_HDP, "2.0.7", "HDFS");
+ Assert.assertEquals("parent-package-def",
+ service.getOsSpecifics().get("any").getPackages().get(0).getName());
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.7", "HBASE");
+ Assert.assertEquals(2, service.getOsSpecifics().keySet().size());
+ // Test command script inheritance
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.8", "HBASE");
+ Assert.assertEquals(2, service.getOsSpecifics().keySet().size());
+ // Test command script override
+ service = metaInfo.getService(STACK_NAME_HDP, "2.0.8", "HDFS");
+ Assert.assertEquals("child-package-def",
+ service.getOsSpecifics().get("any").getPackages().get(0).getName());
+ }
+
+
+ private CustomCommandDefinition findCustomCommand(String ccName,
+ ServiceInfo service) {
+ for (CustomCommandDefinition ccd: service.getCustomCommands()) {
+ if (ccd.getName().equals(ccName)) {
+ return ccd;
+ }
+ }
+ return null;
+ }
+
+ private CustomCommandDefinition findCustomCommand(String ccName,
+ ComponentInfo component) {
+ for (CustomCommandDefinition ccd: component.getCustomCommands()) {
+ if (ccd.getName().equals(ccName)) {
+ return ccd;
+ }
+ }
+ return null;
+ }
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
index 6edb3df..2e064e8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
@@ -22,7 +22,6 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.state.*;
import java.io.File;
-import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -85,7 +84,8 @@ public class StackExtensionHelperTest {
assertEquals("hive-site", configDependencies.get(1));
} else if (serviceInfo.getName().equals("HBASE")) {
assertEquals("HBASE", serviceInfo.getName());
- assertEquals("HBASE", serviceInfo.getServiceMetadataFolder());
+ assertEquals("HDP/2.0.7/services/HBASE/package",
+ serviceInfo.getServicePackageFolder());
assertEquals("2.0", serviceInfo.getSchemaVersion());
assertTrue(serviceInfo.getComment().startsWith("Non-relational distr"));
assertEquals("0.96.0.2.0.6.0", serviceInfo.getVersion());
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 251aa5f..009e0af 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -146,7 +146,7 @@ public class AmbariManagementControllerTest {
private static final String REPO_ID = "HDP-1.1.1.16";
private static final String PROPERTY_NAME = "hbase.regionserver.msginterval";
private static final String SERVICE_NAME = "HDFS";
- private static final int STACK_VERSIONS_CNT = 10;
+ private static final int STACK_VERSIONS_CNT = 11;
private static final int REPOS_CNT = 3;
private static final int STACKS_CNT = 1;
private static final int STACK_PROPERTIES_CNT = 81;
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/python/stacks/1.3.3/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/configs/default.json b/ambari-server/src/test/python/stacks/1.3.3/configs/default.json
index 70b93f5..6d12470 100644
--- a/ambari-server/src/test/python/stacks/1.3.3/configs/default.json
+++ b/ambari-server/src/test/python/stacks/1.3.3/configs/default.json
@@ -25,7 +25,7 @@
"role": "DATANODE",
"commandParams": {
"command_timeout": "600",
- "service_metadata_folder": "HDFS",
+ "service_package_folder": "HDFS",
"script_type": "PYTHON",
"schema_version": "2.0",
"script": "scripts/datanode.py",
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json b/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json
index ac357cc..9520b02 100644
--- a/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/1.3.3/configs/secured.json
@@ -25,7 +25,7 @@
"role": "MYSQL_SERVER",
"commandParams": {
"command_timeout": "600",
- "service_metadata_folder": "HIVE",
+ "service_package_folder": "HIVE",
"script_type": "PYTHON",
"schema_version": "2.0",
"script": "scripts/mysql_server.py",
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/python/stacks/2.1.1/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/configs/default.json b/ambari-server/src/test/python/stacks/2.1.1/configs/default.json
index 5b40256..f1ce054 100644
--- a/ambari-server/src/test/python/stacks/2.1.1/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.1.1/configs/default.json
@@ -21,7 +21,7 @@
"role": "OOZIE_SERVICE_CHECK",
"commandParams": {
"command_timeout": "300",
- "service_metadata_folder": "OOZIE",
+ "service_package_folder": "OOZIE",
"script_type": "PYTHON",
"schema_version": "2.0",
"script": "scripts/service_check.py",
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/python/stacks/2.1.1/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/configs/secured.json b/ambari-server/src/test/python/stacks/2.1.1/configs/secured.json
index b000c56..8477faa 100644
--- a/ambari-server/src/test/python/stacks/2.1.1/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.1.1/configs/secured.json
@@ -26,7 +26,7 @@
"role": "YARN_CLIENT",
"commandParams": {
"command_timeout": "600",
- "service_metadata_folder": "YARN",
+ "service_package_folder": "YARN",
"script_type": "PYTHON",
"schema_version": "2.0",
"script": "scripts/yarn_client.py",
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.6/hooks/dummy-script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.6/hooks/dummy-script.py b/ambari-server/src/test/resources/stacks/HDP/2.0.6/hooks/dummy-script.py
new file mode 100644
index 0000000..a582077
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.6/hooks/dummy-script.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/metainfo.xml
index 07da411..10673b7 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/metainfo.xml
@@ -19,6 +19,6 @@
<versions>
<active>true</active>
</versions>
- <extends>2.0.5</extends>
+ <extends>2.0.6</extends>
</metainfo>
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/package/dummy-script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/package/dummy-script.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/package/dummy-script.py
new file mode 100644
index 0000000..a582077
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/package/dummy-script.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase.py
deleted file mode 100644
index 0a169a4..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_client.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_client.py
deleted file mode 100644
index 8a4d0e0..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_client.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_master.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_master.py
deleted file mode 100644
index 0a169a4..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_master.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_regionserver.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_regionserver.py
deleted file mode 100644
index 0a169a4..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HBASE/scripts/hbase_regionserver.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml
index 3de6ce5..4ab509a 100644
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/metainfo.xml
@@ -36,7 +36,15 @@
<customCommand>
<name>DECOMMISSION</name>
<commandScript>
- <script>scripts/namenode.py</script>
+ <script>scripts/namenode_dec.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ <customCommand>
+ <name>YET_ANOTHER_PARENT_COMMAND</name>
+ <commandScript>
+ <script>scripts/yet_another_parent_command.py</script>
<scriptType>PYTHON</scriptType>
<timeout>600</timeout>
</commandScript>
@@ -101,46 +109,37 @@
<packages>
<package>
<type>rpm</type>
- <name>lzo</name>
- </package>
- <package>
- <type>rpm</type>
- <name>hadoop</name>
- </package>
- <package>
- <type>rpm</type>
- <name>hadoop-libhdfs</name>
- </package>
- <package>
- <type>rpm</type>
- <name>hadoop-lzo</name>
- </package>
- <package>
- <type>rpm</type>
- <name>hadoop-lzo-native</name>
- </package>
- <package>
- <type>rpm</type>
- <name>snappy</name>
- </package>
- <package>
- <type>rpm</type>
- <name>snappy-devel</name>
- </package>
- <package>
- <type>rpm</type>
- <name>ambari-log4j</name>
+ <name>parent-package-def</name>
</package>
</packages>
</osSpecific>
</osSpecifics>
<commandScript>
- <script>scripts/service_check.py</script>
+ <script>scripts/service_check_1.py</script>
<scriptType>PYTHON</scriptType>
<timeout>300</timeout>
</commandScript>
+ <customCommands>
+ <customCommand>
+ <name>RESTART</name>
+ <commandScript>
+ <script>scripts/restart_parent.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ <customCommand>
+ <name>YET_ANOTHER_PARENT_SRV_COMMAND</name>
+ <commandScript>
+ <script>scripts/yet_another_parent_srv_command.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>600</timeout>
+ </commandScript>
+ </customCommand>
+ </customCommands>
+
<configuration-dependencies>
<config-type>core-site</config-type>
<config-type>global</config-type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/dummy-script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/dummy-script.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/dummy-script.py
new file mode 100644
index 0000000..a582077
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/dummy-script.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python2.6
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/files/checkForFormat.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/files/checkForFormat.sh b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/files/checkForFormat.sh
deleted file mode 100644
index d14091a..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/files/checkForFormat.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export hdfs_user=$1
-shift
-export conf_dir=$1
-shift
-export mark_dir=$1
-shift
-export name_dirs=$*
-
-export EXIT_CODE=0
-export command="namenode -format"
-export list_of_non_empty_dirs=""
-
-mark_file=/var/run/hadoop/hdfs/namenode-formatted
-if [[ -f ${mark_file} ]] ; then
- rm -f ${mark_file}
- mkdir -p ${mark_dir}
-fi
-
-if [[ ! -d $mark_dir ]] ; then
- for dir in `echo $name_dirs | tr ',' ' '` ; do
- echo "NameNode Dirname = $dir"
- cmd="ls $dir | wc -l | grep -q ^0$"
- eval $cmd
- if [[ $? -ne 0 ]] ; then
- (( EXIT_CODE = $EXIT_CODE + 1 ))
- list_of_non_empty_dirs="$list_of_non_empty_dirs $dir"
- fi
- done
-
- if [[ $EXIT_CODE == 0 ]] ; then
- su - ${hdfs_user} -c "yes Y | hadoop --config ${conf_dir} ${command}"
- else
- echo "ERROR: Namenode directory(s) is non empty. Will not format the namenode. List of non-empty namenode dirs ${list_of_non_empty_dirs}"
- fi
-else
- echo "${mark_dir} exists. Namenode DFS already formatted"
-fi
-
-exit $EXIT_CODE
-
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/files/checkWebUI.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/files/checkWebUI.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/files/checkWebUI.py
deleted file mode 100644
index f8e9c1a..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/files/checkWebUI.py
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-import optparse
-import httplib
-
-#
-# Main.
-#
-def main():
- parser = optparse.OptionParser(usage="usage: %prog [options] component ")
- parser.add_option("-m", "--hosts", dest="hosts", help="Comma separated hosts list for WEB UI to check it availability")
- parser.add_option("-p", "--port", dest="port", help="Port of WEB UI to check it availability")
-
- (options, args) = parser.parse_args()
-
- hosts = options.hosts.split(',')
- port = options.port
-
- for host in hosts:
- try:
- conn = httplib.HTTPConnection(host, port)
- # This can be modified to get a partial url part to be sent with request
- conn.request("GET", "/")
- httpCode = conn.getresponse().status
- conn.close()
- except Exception:
- httpCode = 404
-
- if httpCode != 200:
- print "Cannot access WEB UI on: http://" + host + ":" + port
- exit(1)
-
-
-if __name__ == "__main__":
- main()
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/datanode.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/datanode.py
deleted file mode 100644
index eaa27cf..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/datanode.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from hdfs_datanode import datanode
-
-
-class DataNode(Script):
- def install(self, env):
- import params
-
- self.install_packages(env)
- env.set_params(params)
-
- def start(self, env):
- import params
-
- env.set_params(params)
- self.config(env)
- datanode(action="start")
-
- def stop(self, env):
- import params
-
- env.set_params(params)
- datanode(action="stop")
-
- def config(self, env):
- import params
-
- datanode(action="configure")
-
- def status(self, env):
- import status_params
-
- env.set_params(status_params)
- check_process_status(status_params.datanode_pid_file)
-
-
-if __name__ == "__main__":
- DataNode().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/hdfs_client.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/hdfs_client.py
deleted file mode 100644
index 6babde5..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/hdfs_client.py
+++ /dev/null
@@ -1,49 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from utils import service
-
-
-class HdfsClient(Script):
- def install(self, env):
- import params
-
- self.install_packages(env)
- env.set_params(params)
- self.config(env)
-
- def start(self, env):
- import params
-
- env.set_params(params)
-
- def stop(self, env):
- import params
-
- env.set_params(params)
-
- def config(self, env):
- import params
-
- pass
-
-
-if __name__ == "__main__":
- HdfsClient().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/37f11ebd/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/hdfs_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/hdfs_datanode.py b/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/hdfs_datanode.py
deleted file mode 100644
index e0b6c39..0000000
--- a/ambari-server/src/test/resources/stacks/HDP/2.0.7/services/HDFS/package/scripts/hdfs_datanode.py
+++ /dev/null
@@ -1,56 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from utils import service
-
-
-def datanode(action=None):
- import params
-
- if action == "configure":
- Directory(params.dfs_domain_socket_dir,
- recursive=True,
- mode=0750,
- owner=params.hdfs_user,
- group=params.user_group)
- Directory(params.dfs_data_dir,
- recursive=True,
- mode=0755,
- owner=params.hdfs_user,
- group=params.user_group)
-
- if action == "start":
- service(
- action=action, name="datanode",
- user=params.hdfs_user,
- create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_datanode_keytab_file,
- principal=params.dfs_datanode_kerberos_principal
- )
- if action == "stop":
- service(
- action=action, name="datanode",
- user=params.hdfs_user,
- create_pid_dir=True,
- create_log_dir=True,
- keytab=params.dfs_datanode_keytab_file,
- principal=params.dfs_datanode_kerberos_principal
- )