You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by jo...@apache.org on 2017/07/24 17:28:03 UTC
[1/3] ambari git commit: AMBARI-21450 - Fixing Unit Test Logic From
trunk Merge (jonathanhurley)
Repository: ambari
Updated Branches:
refs/heads/branch-feature-AMBARI-21450 51e3080ef -> 15cd3d837
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
index 70f0332..2b4a008 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterResourceProviderTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -145,14 +145,14 @@ public class ClusterResourceProviderTest {
Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
Map<String, Object> properties = requestProperties.iterator().next();
properties.put(BaseClusterRequest.PROVISION_ACTION_PROPERTY, "INSTALL_ONLY");
- Map<String, String> requestInfoProperties = new HashMap<String, String>();
+ Map<String, String> requestInfoProperties = new HashMap<>();
requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{}");
// set expectations
expect(request.getProperties()).andReturn(requestProperties).anyTimes();
expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
- expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn(null)
+ expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.<Map<String, Object>>anyObject(), anyBoolean())).andReturn(null)
.once();
expect(topologyFactory.createProvisionClusterRequest(properties, null)).andReturn(topologyRequest).once();
expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
@@ -174,7 +174,7 @@ public class ClusterResourceProviderTest {
public void testCreateResource_blueprint_withInvalidSecurityConfiguration() throws Exception {
Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
Map<String, Object> properties = requestProperties.iterator().next();
- Map<String, String> requestInfoProperties = new HashMap<String, String>();
+ Map<String, String> requestInfoProperties = new HashMap<>();
requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{\"security\" : {\n\"type\" : \"NONE\"," +
"\n\"kerberos_descriptor_reference\" : " + "\"testRef\"\n}}");
SecurityConfiguration blueprintSecurityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef",
@@ -185,7 +185,7 @@ public class ClusterResourceProviderTest {
expect(request.getProperties()).andReturn(requestProperties).anyTimes();
expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
- expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn
+ expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.<Map<String, Object>>anyObject(), anyBoolean())).andReturn
(securityConfiguration).once();
expect(topologyFactory.createProvisionClusterRequest(properties, securityConfiguration)).andReturn(topologyRequest).once();
expect(topologyRequest.getBlueprint()).andReturn(blueprint).anyTimes();
@@ -203,7 +203,7 @@ public class ClusterResourceProviderTest {
Map<String, Object> properties = requestProperties.iterator().next();
SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null);
- Map<String, String> requestInfoProperties = new HashMap<String, String>();
+ Map<String, String> requestInfoProperties = new HashMap<>();
requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{\"security\" : {\n\"type\" : \"KERBEROS\",\n\"kerberos_descriptor_reference\" : " +
"\"testRef\"\n}}");
@@ -212,7 +212,7 @@ public class ClusterResourceProviderTest {
expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
expect(topologyFactory.createProvisionClusterRequest(properties, securityConfiguration)).andReturn(topologyRequest).once();
- expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn
+ expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.<Map<String, Object>>anyObject(), anyBoolean())).andReturn
(securityConfiguration).once();
expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
expect(requestStatusResponse.getRequestId()).andReturn(5150L).anyTimes();
@@ -284,10 +284,10 @@ public class ClusterResourceProviderTest {
((ObservableResourceProvider)provider).addObserver(observer);
// add the property map to a set for the request. add more maps for multiple creates
- Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+ Set<Map<String, Object>> propertySet = new LinkedHashSet<>();
// Cluster 1: create a map of properties for the request
- Map<String, Object> properties = new LinkedHashMap<String, Object>();
+ Map<String, Object> properties = new LinkedHashMap<>();
// add the cluster name to the properties map
properties.put(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID, "Cluster100");
@@ -332,21 +332,21 @@ public class ClusterResourceProviderTest {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
Clusters clusters = createMock(Clusters.class);
- Set<ClusterResponse> allResponse = new HashSet<ClusterResponse>();
- allResponse.add(new ClusterResponse(100L, "Cluster100", State.INSTALLED, SecurityType.NONE, null, null, null, null));
- allResponse.add(new ClusterResponse(101L, "Cluster101", State.INSTALLED, SecurityType.NONE, null, null, null, null));
- allResponse.add(new ClusterResponse(102L, "Cluster102", State.INSTALLED, SecurityType.NONE, null, null, null, null));
- allResponse.add(new ClusterResponse(103L, "Cluster103", State.INSTALLED, SecurityType.NONE, null, null, null, null));
- allResponse.add(new ClusterResponse(104L, "Cluster104", State.INSTALLED, SecurityType.NONE, null, null, null, null));
+ Set<ClusterResponse> allResponse = new HashSet<>();
+ allResponse.add(new ClusterResponse(100L, "Cluster100", State.INSTALLED, SecurityType.NONE, null, 0, null, null));
+ allResponse.add(new ClusterResponse(101L, "Cluster101", State.INSTALLED, SecurityType.NONE, null, 0, null, null));
+ allResponse.add(new ClusterResponse(102L, "Cluster102", State.INSTALLED, SecurityType.NONE, null, 0, null, null));
+ allResponse.add(new ClusterResponse(103L, "Cluster103", State.INSTALLED, SecurityType.NONE, null, 0, null, null));
+ allResponse.add(new ClusterResponse(104L, "Cluster104", State.INSTALLED, SecurityType.NONE, null, 0, null, null));
- Set<ClusterResponse> nameResponse = new HashSet<ClusterResponse>();
- nameResponse.add(new ClusterResponse(102L, "Cluster102", State.INSTALLED, SecurityType.NONE, null, null, null, null));
+ Set<ClusterResponse> nameResponse = new HashSet<>();
+ nameResponse.add(new ClusterResponse(102L, "Cluster102", State.INSTALLED, SecurityType.NONE, null, 0, null, null));
- Set<ClusterResponse> idResponse = new HashSet<ClusterResponse>();
- idResponse.add(new ClusterResponse(103L, "Cluster103", State.INSTALLED, SecurityType.NONE, null, null, null, null));
+ Set<ClusterResponse> idResponse = new HashSet<>();
+ idResponse.add(new ClusterResponse(103L, "Cluster103", State.INSTALLED, SecurityType.NONE, null, 0, null, null));
// set expectations
- Capture<Set<ClusterRequest>> captureClusterRequests = new Capture<Set<ClusterRequest>>();
+ Capture<Set<ClusterRequest>> captureClusterRequests = EasyMock.newCapture();
expect(managementController.getClusters(capture(captureClusterRequests))).andReturn(allResponse).once();
expect(managementController.getClusters(capture(captureClusterRequests))).andReturn(nameResponse).once();
@@ -365,7 +365,7 @@ public class ClusterResourceProviderTest {
PropertyHelper.getKeyPropertyIds(type),
managementController);
- Set<String> propertyIds = new HashSet<String>();
+ Set<String> propertyIds = new HashSet<>();
propertyIds.add(ClusterResourceProvider.CLUSTER_ID_PROPERTY_ID);
propertyIds.add(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID);
@@ -452,40 +452,40 @@ public class ClusterResourceProviderTest {
//todo: configuration properties are not being added to props
private Set<Map<String, Object>> createBlueprintRequestProperties(String clusterName, String blueprintName) {
- Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
- Map<String, Object> properties = new LinkedHashMap<String, Object>();
+ Set<Map<String, Object>> propertySet = new LinkedHashSet<>();
+ Map<String, Object> properties = new LinkedHashMap<>();
properties.put(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID, clusterName);
properties.put(ClusterResourceProvider.BLUEPRINT_PROPERTY_ID, blueprintName);
propertySet.add(properties);
- Collection<Map<String, Object>> hostGroups = new ArrayList<Map<String, Object>>();
- Map<String, Object> hostGroupProperties = new HashMap<String, Object>();
+ Collection<Map<String, Object>> hostGroups = new ArrayList<>();
+ Map<String, Object> hostGroupProperties = new HashMap<>();
hostGroups.add(hostGroupProperties);
hostGroupProperties.put("name", "group1");
- Collection<Map<String, String>> hostGroupHosts = new ArrayList<Map<String, String>>();
+ Collection<Map<String, String>> hostGroupHosts = new ArrayList<>();
hostGroupProperties.put("hosts", hostGroupHosts);
- Map<String, String> hostGroupHostProperties = new HashMap<String, String>();
+ Map<String, String> hostGroupHostProperties = new HashMap<>();
hostGroupHostProperties.put("fqdn", "host.domain");
hostGroupHosts.add(hostGroupHostProperties);
properties.put("host_groups", hostGroups);
- Map<String, String> mapGroupConfigProperties = new HashMap<String, String>();
+ Map<String, String> mapGroupConfigProperties = new HashMap<>();
mapGroupConfigProperties.put("myGroupProp", "awesomeValue");
// blueprint core-site cluster configuration properties
- Map<String, String> blueprintCoreConfigProperties = new HashMap<String, String>();
+ Map<String, String> blueprintCoreConfigProperties = new HashMap<>();
blueprintCoreConfigProperties.put("property1", "value2");
blueprintCoreConfigProperties.put("new.property", "new.property.value");
- Map<String, String> blueprintGlobalConfigProperties = new HashMap<String, String>();
+ Map<String, String> blueprintGlobalConfigProperties = new HashMap<>();
blueprintGlobalConfigProperties.put("hive_database", "New MySQL Database");
- Map<String, String> oozieEnvConfigProperties = new HashMap<String, String>();
+ Map<String, String> oozieEnvConfigProperties = new HashMap<>();
oozieEnvConfigProperties.put("property1","value2");
- Map<String, String> hbaseEnvConfigProperties = new HashMap<String, String>();
+ Map<String, String> hbaseEnvConfigProperties = new HashMap<>();
hbaseEnvConfigProperties.put("property1","value2");
- Map<String, String> falconEnvConfigProperties = new HashMap<String, String>();
+ Map<String, String> falconEnvConfigProperties = new HashMap<>();
falconEnvConfigProperties.put("property1","value2");
return propertySet;
@@ -494,14 +494,14 @@ public class ClusterResourceProviderTest {
private void testCreateResource_blueprint(Authentication authentication) throws Exception {
Set<Map<String, Object>> requestProperties = createBlueprintRequestProperties(CLUSTER_NAME, BLUEPRINT_NAME);
Map<String, Object> properties = requestProperties.iterator().next();
- Map<String, String> requestInfoProperties = new HashMap<String, String>();
+ Map<String, String> requestInfoProperties = new HashMap<>();
requestInfoProperties.put(Request.REQUEST_INFO_BODY_PROPERTY, "{}");
// set expectations
expect(request.getProperties()).andReturn(requestProperties).anyTimes();
expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
- expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn(null)
+ expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.<Map<String, Object>>anyObject(), anyBoolean())).andReturn(null)
.once();
expect(topologyFactory.createProvisionClusterRequest(properties, null)).andReturn(topologyRequest).once();
expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
@@ -545,10 +545,10 @@ public class ClusterResourceProviderTest {
((ObservableResourceProvider)provider).addObserver(observer);
// add the property map to a set for the request. add more maps for multiple creates
- Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+ Set<Map<String, Object>> propertySet = new LinkedHashSet<>();
// Cluster 1: create a map of properties for the request
- Map<String, Object> properties = new LinkedHashMap<String, Object>();
+ Map<String, Object> properties = new LinkedHashMap<>();
// add the cluster name to the properties map
properties.put(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID, "Cluster100");
@@ -559,7 +559,7 @@ public class ClusterResourceProviderTest {
propertySet.add(properties);
// Cluster 2: create a map of properties for the request
- properties = new LinkedHashMap<String, Object>();
+ properties = new LinkedHashMap<>();
// add the cluster id to the properties map
properties.put(ClusterResourceProvider.CLUSTER_ID_PROPERTY_ID, 99L);
@@ -593,10 +593,10 @@ public class ClusterResourceProviderTest {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
- Set<ClusterResponse> nameResponse = new HashSet<ClusterResponse>();
- nameResponse.add(new ClusterResponse(102L, "Cluster102", State.INIT, SecurityType.NONE, null, null, null, null));
+ Set<ClusterResponse> nameResponse = new HashSet<>();
+ nameResponse.add(new ClusterResponse(102L, "Cluster102", State.INIT, SecurityType.NONE, null, 0, null, null));
- Map<String, String> mapRequestProps = new HashMap<String, String>();
+ Map<String, String> mapRequestProps = new HashMap<>();
mapRequestProps.put("context", "Called from a test");
// set expectations
@@ -628,7 +628,7 @@ public class ClusterResourceProviderTest {
((ObservableResourceProvider)provider).addObserver(observer);
- Map<String, Object> properties = new LinkedHashMap<String, Object>();
+ Map<String, Object> properties = new LinkedHashMap<>();
properties.put(ClusterResourceProvider.CLUSTER_VERSION_PROPERTY_ID, "HDP-0.1");
@@ -661,10 +661,10 @@ public class ClusterResourceProviderTest {
Clusters clusters = createMock(Clusters.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
- Set<ClusterResponse> nameResponse = new HashSet<ClusterResponse>();
- nameResponse.add(new ClusterResponse(100L, "Cluster100", State.INSTALLED, SecurityType.NONE, null, null, null, null));
+ Set<ClusterResponse> nameResponse = new HashSet<>();
+ nameResponse.add(new ClusterResponse(100L, "Cluster100", State.INSTALLED, SecurityType.NONE, null, 0, null, null));
- Map<String, String> mapRequestProps = new HashMap<String, String>();
+ Map<String, String> mapRequestProps = new HashMap<>();
mapRequestProps.put("context", "Called from a test");
// set expectations
@@ -680,7 +680,7 @@ public class ClusterResourceProviderTest {
SecurityContextHolder.getContext().setAuthentication(authentication);
- Map<String, Object> properties = new LinkedHashMap<String, Object>();
+ Map<String, Object> properties = new LinkedHashMap<>();
properties.put(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(PropertyHelper.getPropertyId("Clusters.desired_config", "type"), "global");
@@ -689,7 +689,7 @@ public class ClusterResourceProviderTest {
properties.put(PropertyHelper.getPropertyId("Clusters.desired_config.properties", "x"), "y");
- Map<String, Object> properties2 = new LinkedHashMap<String, Object>();
+ Map<String, Object> properties2 = new LinkedHashMap<>();
properties2.put(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties2.put(PropertyHelper.getPropertyId("Clusters.desired_config", "type"), "mapred-site");
@@ -697,7 +697,7 @@ public class ClusterResourceProviderTest {
properties2.put(PropertyHelper.getPropertyId("Clusters.desired_config.properties", "foo"), "A1");
properties2.put(PropertyHelper.getPropertyId("Clusters.desired_config.properties", "bar"), "B2");
- Set<Map<String, Object>> propertySet = new HashSet<Map<String, Object>>();
+ Set<Map<String, Object>> propertySet = new HashSet<>();
propertySet.add(properties);
propertySet.add(properties2);
@@ -806,10 +806,10 @@ public class ClusterResourceProviderTest {
managementController);
// add the property map to a set for the request. add more maps for multiple creates
- Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
+ Set<Map<String, Object>> propertySet = new LinkedHashSet<>();
// Cluster 1: create a map of properties for the request
- Map<String, Object> properties = new LinkedHashMap<String, Object>();
+ Map<String, Object> properties = new LinkedHashMap<>();
properties.put(ClusterResourceProvider.CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ClusterResourceProvider.CLUSTER_VERSION_PROPERTY_ID, "HDP-0.1");
properties.put(ClusterResourceProvider.CLUSTER_REPO_VERSION, "2.1.1");
@@ -826,7 +826,6 @@ public class ClusterResourceProviderTest {
assertTrue(cap.hasCaptured());
assertNotNull(cap.getValue());
- assertEquals("2.1.1", cap.getValue().getRepositoryVersion());
}
@Test
@@ -844,7 +843,7 @@ public class ClusterResourceProviderTest {
expect(request.getProperties()).andReturn(requestProperties).anyTimes();
expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties).anyTimes();
- expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(HashMap.class), anyBoolean())).andReturn(null)
+ expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.<Map<String, Object>>anyObject(), anyBoolean())).andReturn(null)
.once();
expect(topologyFactory.createProvisionClusterRequest(properties, null)).andReturn(topologyRequest).once();
expect(topologyManager.provisionCluster(topologyRequest)).andReturn(requestStatusResponse).once();
@@ -861,4 +860,4 @@ public class ClusterResourceProviderTest {
verifyAll();
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java
index f133784..fdd1dc5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/JMXHostProviderTest.java
@@ -124,8 +124,7 @@ public class JMXHostProviderTest {
dStateStr = desiredState.toString();
}
- ServiceRequest r1 = new ServiceRequest(clusterName, serviceName, STACK_ID.getStackId(),
- REPO_VERSION, dStateStr);
+ ServiceRequest r1 = new ServiceRequest(clusterName, serviceName, m_repositoryVersion.getId(), dStateStr);
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r1);
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
index 7fe888d..a0c58ea 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ServiceResourceProviderTest.java
@@ -153,7 +153,7 @@ public class ServiceResourceProviderTest {
properties.put(ServiceResourceProvider.SERVICE_SERVICE_NAME_PROPERTY_ID, "Service100");
properties.put(ServiceResourceProvider.SERVICE_SERVICE_STATE_PROPERTY_ID, "INIT");
properties.put(ServiceResourceProvider.SERVICE_DESIRED_STACK_PROPERTY_ID, "HDP-1.1");
- properties.put(ServiceResourceProvider.SERVICE_DESIRED_REPO_VERSION_PROPERTY_ID, "1");
+ properties.put(ServiceResourceProvider.SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID, "1");
propertySet.add(properties);
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
index d07ac15..2e216e6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckActionTest.java
@@ -91,7 +91,7 @@ public class ComponentVersionCheckActionTest {
private static final StackId HDP_21_STACK = new StackId("HDP-2.1.1");
private static final StackId HDP_22_STACK = new StackId("HDP-2.2.0");
- private static final String HDP_211_CENTOS6_REPO_URL = "http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0";
+ private static final String HDP_211_CENTOS6_REPO_URL = "http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.1.1.0-118";
private Injector m_injector;
@@ -544,4 +544,4 @@ public class ComponentVersionCheckActionTest {
configFactory.createNew(cluster, "core-site", "version1", properties, propertiesAttributes);
configFactory.createNew(cluster, "foo-site", "version1", properties, propertiesAttributes);
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
index 731e100..e80b59b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/ConfigureActionTest.java
@@ -137,12 +137,9 @@ public class ConfigureActionTest {
m_injector.getInstance(GuiceJpaInitializer.class);
m_injector.injectMembers(this);
- repoVersion2110 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.1.1"),
- "2.1.1.0-1234");
- repoVersion2111 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.1.1"),
- "2.1.1.1-5678");
- repoVersion2200 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.2.0"),
- "2.2.0.0-1234");
+ repoVersion2110 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.1.1"), "2.1.1.0-1234");
+ repoVersion2111 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.1.1"), "2.1.1.1-5678");
+ repoVersion2200 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.2.0"), "2.2.0.0-1234");
makeUpgradeCluster();
}
@@ -170,7 +167,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -220,13 +217,16 @@ public class ConfigureActionTest {
Cluster c = clusters.getCluster("c1");
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
+ c.setCurrentStackVersion(repoVersion2110.getStackId());
+ c.setDesiredStackVersion(repoVersion2200.getStackId());
+
Map<String, String> properties = new HashMap<String, String>() {
{
put("initLimit", "10");
}
};
- Config config = createConfig(c, repoVersion2200, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -282,7 +282,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -340,7 +340,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -470,7 +470,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -528,7 +528,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -594,7 +594,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -643,13 +643,16 @@ public class ConfigureActionTest {
Cluster c = clusters.getCluster("c1");
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
+ c.setCurrentStackVersion(repoVersion2110.getStackId());
+ c.setDesiredStackVersion(repoVersion2200.getStackId());
+
Map<String, String> properties = new HashMap<String, String>() {
{
put("fooKey", "barValue");
}
};
- Config config = createConfig(c, repoVersion2200, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -701,6 +704,9 @@ public class ConfigureActionTest {
Cluster c = clusters.getCluster("c1");
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
+ c.setCurrentStackVersion(repoVersion2110.getStackId());
+ c.setDesiredStackVersion(repoVersion2200.getStackId());
+
Map<String, String> properties = new HashMap<String, String>() {
{
put("set.key.1", "s1");
@@ -710,7 +716,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2200, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -798,7 +804,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2200, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -863,10 +869,12 @@ public class ConfigureActionTest {
@Test
public void testAllowedReplacment() throws Exception {
-
Cluster c = clusters.getCluster("c1");
assertEquals(1, c.getConfigsByType("zoo.cfg").size());
+ c.setCurrentStackVersion(repoVersion2110.getStackId());
+ c.setDesiredStackVersion(repoVersion2200.getStackId());
+
Map<String, String> properties = new HashMap<String, String>() {
{
put("replace.key.1", "r1");
@@ -877,7 +885,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2200, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -964,7 +972,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1044,7 +1052,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1153,7 +1161,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1249,7 +1257,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1351,7 +1359,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1448,7 +1456,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1542,7 +1550,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1633,7 +1641,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version2", properties);
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
c.addDesiredConfig("user", Collections.singleton(config));
assertEquals(2, c.getConfigsByType("zoo.cfg").size());
@@ -1735,7 +1743,7 @@ public class ConfigureActionTest {
}
};
- Config config = createConfig(c, repoVersion2110, "zoo.cfg", "version1", properties);
+ Config config = createConfig(c, "zoo.cfg", "version1", properties);
c.addDesiredConfig("user", Collections.singleton(config));
@@ -1847,8 +1855,8 @@ public class ConfigureActionTest {
return executionCommand;
}
- private Config createConfig(Cluster cluster, RepositoryVersionEntity repoVersion, String type,
- String tag, Map<String, String> properties) {
+ private Config createConfig(Cluster cluster, String type, String tag,
+ Map<String, String> properties) {
return configFactory.createNew(cluster, type, tag, properties,
NO_ATTRIBUTES);
}
[2/3] ambari git commit: AMBARI-21450 - Fixing Unit Test Logic From
trunk Merge (jonathanhurley)
Posted by jo...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
index 0df6f68..bc78293 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/ServiceConfigEntity.java
@@ -21,7 +21,6 @@ package org.apache.ambari.server.orm.entities;
import java.util.List;
import javax.persistence.Basic;
-import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
@@ -47,12 +46,24 @@ import javax.persistence.TableGenerator;
, initialValue = 1
)
@NamedQueries({
- @NamedQuery(name = "ServiceConfigEntity.findAll", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId=:clusterId ORDER BY serviceConfig.version DESC"),
- @NamedQuery(name = "ServiceConfigEntity.findNextServiceConfigVersion", query = "SELECT COALESCE(MAX(serviceConfig.version), 0) + 1 AS nextVersion FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.serviceName=:serviceName AND serviceConfig.clusterId=:clusterId"),
- @NamedQuery(name = "ServiceConfigEntity.findAllServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId=:clusterId AND serviceConfig.stack=:stack"),
- @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByStack", query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId = :clusterId AND serviceConfig.version = (SELECT MAX(serviceConfig2.version) FROM ServiceConfigEntity serviceConfig2 WHERE serviceConfig2.clusterId=:clusterId AND serviceConfig2.stack=:stack AND serviceConfig2.serviceName = serviceConfig.serviceName)"),
- @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByService", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))"),
- @NamedQuery(name = "ServiceConfigEntity.findLatestServiceConfigsByCluster", query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceConfigId IN (SELECT MAX(scv1.serviceConfigId) FROM ServiceConfigEntity scv1 WHERE (scv1.clusterId = :clusterId) AND (scv1.groupId IS NULL) GROUP BY scv1.serviceName)")})
+ @NamedQuery(
+ name = "ServiceConfigEntity.findAll",
+ query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId=:clusterId ORDER BY serviceConfig.version DESC"),
+ @NamedQuery(
+ name = "ServiceConfigEntity.findNextServiceConfigVersion",
+ query = "SELECT COALESCE(MAX(serviceConfig.version), 0) + 1 AS nextVersion FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.serviceName=:serviceName AND serviceConfig.clusterId=:clusterId"),
+ @NamedQuery(
+ name = "ServiceConfigEntity.findServiceConfigsByStack",
+ query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId=:clusterId AND serviceConfig.stack=:stack AND serviceConfig.serviceName=:serviceName"),
+ @NamedQuery(
+ name = "ServiceConfigEntity.findLatestServiceConfigsByStack",
+ query = "SELECT serviceConfig FROM ServiceConfigEntity serviceConfig WHERE serviceConfig.clusterId = :clusterId AND (serviceConfig.groupId = null OR serviceConfig.groupId IN (SELECT cg.groupId from ConfigGroupEntity cg)) AND serviceConfig.version = (SELECT MAX(serviceConfig2.version) FROM ServiceConfigEntity serviceConfig2 WHERE serviceConfig2.clusterId= :clusterId AND serviceConfig2.stack = :stack AND serviceConfig2.serviceName = serviceConfig.serviceName)"),
+ @NamedQuery(
+ name = "ServiceConfigEntity.findLatestServiceConfigsByService",
+ query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceName = :serviceName AND (scv.groupId = null OR scv.groupId IN (SELECT cg.groupId from ConfigGroupEntity cg)) AND scv.version = (SELECT MAX(scv2.version) FROM ServiceConfigEntity scv2 WHERE (scv2.serviceName = :serviceName AND scv2.clusterId = :clusterId) AND (scv2.groupId = scv.groupId OR (scv2.groupId IS NULL AND scv.groupId IS NULL)))"),
+ @NamedQuery(
+ name = "ServiceConfigEntity.findLatestServiceConfigsByCluster",
+ query = "SELECT scv FROM ServiceConfigEntity scv WHERE scv.clusterId = :clusterId AND scv.serviceConfigId IN (SELECT MAX(scv1.serviceConfigId) FROM ServiceConfigEntity scv1 WHERE (scv1.clusterId = :clusterId) AND (scv1.groupId IS NULL) GROUP BY scv1.serviceName)") })
public class ServiceConfigEntity {
@Id
@Column(name = "service_config_id")
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckAction.java
index 44d2b4d..0d87cd8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ComponentVersionCheckAction.java
@@ -118,4 +118,4 @@ public class ComponentVersionCheckAction extends FinalizeUpgradeAction {
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
index eb432df..5c65911 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -34,7 +34,7 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.ConfigurationRequest;
-import org.apache.ambari.server.serveraction.AbstractServerAction;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.serveraction.ServerAction;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
@@ -45,12 +45,14 @@ import org.apache.ambari.server.state.ConfigMergeHelper.ThreeWayValue;
import org.apache.ambari.server.state.DesiredConfig;
import org.apache.ambari.server.state.PropertyInfo;
import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.UpgradeContext;
import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition.ConfigurationKeyValue;
import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition.Insert;
import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition.Masked;
import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition.Replace;
import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition.Transfer;
import org.apache.ambari.server.state.stack.upgrade.ConfigureTask;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
import org.apache.ambari.server.state.stack.upgrade.PropertyKeyState;
import org.apache.ambari.server.state.stack.upgrade.TransferOperation;
import org.apache.commons.lang.StringUtils;
@@ -82,9 +84,9 @@ import com.google.inject.Provider;
* property value</li>
* </ul>
*/
-public class ConfigureAction extends AbstractServerAction {
+public class ConfigureAction extends AbstractUpgradeServerAction {
- private static Logger LOG = LoggerFactory.getLogger(ConfigureAction.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ConfigureAction.class);
/**
* Used to lookup the cluster.
@@ -182,9 +184,16 @@ public class ConfigureAction extends AbstractServerAction {
String clusterName = commandParameters.get("clusterName");
Cluster cluster = m_clusters.getCluster(clusterName);
+ UpgradeContext upgradeContext = getUpgradeContext(cluster);
// such as hdfs-site or hbase-env
String configType = commandParameters.get(ConfigureTask.PARAMETER_CONFIG_TYPE);
+ String serviceName = cluster.getServiceByConfigType(configType);
+
+ RepositoryVersionEntity sourceRepoVersion = upgradeContext.getSourceRepositoryVersion(serviceName);
+ RepositoryVersionEntity targetRepoVersion = upgradeContext.getTargetRepositoryVersion(serviceName);
+ StackId sourceStackId = sourceRepoVersion.getStackId();
+ StackId targetStackId = targetRepoVersion.getStackId();
// extract setters
List<ConfigurationKeyValue> keyValuePairs = Collections.emptyList();
@@ -252,13 +261,12 @@ public class ConfigureAction extends AbstractServerAction {
if (desiredConfig == null) {
throw new AmbariException("Could not find desired config type with name " + configType);
}
+
Config config = cluster.getConfig(configType, desiredConfig.getTag());
if (config == null) {
throw new AmbariException("Could not find config type with name " + configType);
}
- StackId currentStack = cluster.getCurrentStackVersion();
- StackId targetStack = cluster.getDesiredStackVersion();
StackId configStack = config.getStackId();
// !!! initial reference values
@@ -405,8 +413,8 @@ public class ConfigureAction extends AbstractServerAction {
String oldValue = base.get(key);
// !!! values are not changing, so make this a no-op
- if (null != oldValue && value.equals(oldValue)) {
- if (currentStack.equals(targetStack) && !changedValues) {
+ if (StringUtils.equals(value, oldValue)) {
+ if (sourceStackId.equals(targetStackId) && !changedValues) {
updateBufferWithMessage(outputBuffer,
MessageFormat.format(
"{0}/{1} for cluster {2} would not change, skipping setting", configType, key,
@@ -519,7 +527,7 @@ public class ConfigureAction extends AbstractServerAction {
// !!! check to see if we're going to a new stack and double check the
// configs are for the target. Then simply update the new properties instead
// of creating a whole new history record since it was already done
- if (!targetStack.equals(currentStack) && targetStack.equals(configStack)) {
+ if (!targetStackId.equals(sourceStackId) && targetStackId.equals(configStack)) {
config.setProperties(newValues);
config.save();
@@ -528,7 +536,9 @@ public class ConfigureAction extends AbstractServerAction {
// !!! values are different and within the same stack. create a new
// config and service config version
- String serviceVersionNote = "Stack Upgrade";
+ Direction direction = upgradeContext.getDirection();
+ String serviceVersionNote = String.format("%s %s %s", direction.getText(true),
+ direction.getPreposition(), upgradeContext.getRepositoryVersion().getVersion());
String auditName = getExecutionCommand().getRoleParams().get(ServerAction.ACTION_USER_NAME);
@@ -536,12 +546,10 @@ public class ConfigureAction extends AbstractServerAction {
auditName = m_configuration.getAnonymousAuditName();
}
- m_configHelper.createConfigType(cluster, cluster.getDesiredStackVersion(), m_controller, configType,
+ m_configHelper.createConfigType(cluster, targetStackId, m_controller, configType,
newValues, auditName, serviceVersionNote);
- String message = "Finished updating configuration ''{0}''";
- message = MessageFormat.format(message, configType);
- return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", message, "");
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", outputBuffer.toString(), "");
}
@@ -780,4 +788,4 @@ public class ConfigureAction extends AbstractServerAction {
private void updateBufferWithMessage(StringBuilder buffer, String message) {
buffer.append(message).append(System.lineSeparator());
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
index 345f598..6332fbf 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java
@@ -65,6 +65,8 @@ import org.apache.ambari.server.controller.spi.ClusterController;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.utilities.ClusterControllerHelper;
+import org.apache.ambari.server.orm.dao.RepositoryVersionDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.security.authorization.AuthorizationException;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
@@ -104,6 +106,9 @@ public class AmbariContext {
@Inject
ConfigFactory configFactory;
+ @Inject
+ RepositoryVersionDAO repositoryVersionDAO;
+
/**
* Used for getting configuration property values from stack and services.
*/
@@ -186,18 +191,24 @@ public class AmbariContext {
return getController().getActionManager().getTasks(ids);
}
- public void createAmbariResources(ClusterTopology topology, String clusterName, SecurityType securityType, String repoVersion) {
+ public void createAmbariResources(ClusterTopology topology, String clusterName, SecurityType securityType, String repoVersionString) {
Stack stack = topology.getBlueprint().getStack();
StackId stackId = new StackId(stack.getName(), stack.getVersion());
- createAmbariClusterResource(clusterName, stack.getName(), stack.getVersion(), securityType, repoVersion);
- createAmbariServiceAndComponentResources(topology, clusterName, stackId, repoVersion);
+ RepositoryVersionEntity repoVersion = repositoryVersionDAO.findByStackAndVersion(stackId, repoVersionString);
+
+ if (null == repoVersion) {
+ throw new IllegalArgumentException(String.format("Could not identify repository version with stack %s and version %s for installing services",
+ stackId, repoVersionString));
+ }
+
+ createAmbariClusterResource(clusterName, stack.getName(), stack.getVersion(), securityType);
+ createAmbariServiceAndComponentResources(topology, clusterName, stackId, repoVersion.getId());
}
- public void createAmbariClusterResource(String clusterName, String stackName, String stackVersion, SecurityType securityType, String repoVersion) {
+ public void createAmbariClusterResource(String clusterName, String stackName, String stackVersion, SecurityType securityType) {
String stackInfo = String.format("%s-%s", stackName, stackVersion);
final ClusterRequest clusterRequest = new ClusterRequest(null, clusterName, null, securityType, stackInfo, null);
- clusterRequest.setRepositoryVersion(repoVersion);
try {
RetryHelper.executeWithRetry(new Callable<Object>() {
@@ -219,7 +230,7 @@ public class AmbariContext {
}
public void createAmbariServiceAndComponentResources(ClusterTopology topology, String clusterName,
- StackId stackId, String repositoryVersion) {
+ StackId stackId, Long repositoryVersionId) {
Collection<String> services = topology.getBlueprint().getServices();
try {
@@ -232,8 +243,7 @@ public class AmbariContext {
Set<ServiceComponentRequest> componentRequests = new HashSet<>();
for (String service : services) {
String credentialStoreEnabled = topology.getBlueprint().getCredentialStoreEnabled(service);
- serviceRequests.add(new ServiceRequest(clusterName, service, stackId.getStackId(),
- repositoryVersion, null, credentialStoreEnabled));
+ serviceRequests.add(new ServiceRequest(clusterName, service, repositoryVersionId, null, credentialStoreEnabled));
for (String component : topology.getBlueprint().getComponents(service)) {
String recoveryEnabled = topology.getBlueprint().getRecoveryEnabled(service, component);
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
index 4100f15..e20b7b3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapperTest.java
@@ -294,6 +294,7 @@ public class ExecutionCommandWrapperTest {
StackId stackId = cluster.getDesiredStackVersion();
RepositoryVersionEntity repositoryVersion = ormTestHelper.getOrCreateRepositoryVersion(stackId, "0.1-0000");
+ cluster.addService("HDFS", repositoryVersion);
// first try with an INSTALL command - this should not populate version info
ExecutionCommand executionCommand = new ExecutionCommand();
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
index 5feb3cc..8f0c467 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/agent/AgentResourceTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -35,6 +35,7 @@ import org.apache.ambari.server.actionmanager.HostRoleCommandFactoryImpl;
import org.apache.ambari.server.actionmanager.StageFactory;
import org.apache.ambari.server.agent.rest.AgentResource;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
import org.apache.ambari.server.metadata.CachedRoleCommandOrderProvider;
import org.apache.ambari.server.metadata.RoleCommandOrderProvider;
@@ -74,13 +75,13 @@ import org.apache.ambari.server.state.stack.OsFamily;
import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
import org.apache.ambari.server.topology.PersistedState;
import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.easymock.EasyMock;
import org.eclipse.jetty.server.SessionManager;
import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
@@ -102,7 +103,7 @@ import junit.framework.Assert;
public class AgentResourceTest extends RandomPortJerseyTest {
static String PACKAGE_NAME = "org.apache.ambari.server.agent.rest";
- private static Log LOG = LogFactory.getLog(AgentResourceTest.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AgentResourceTest.class);
protected Client client;
HeartBeatHandler handler;
ActionManager actionManager;
@@ -323,6 +324,7 @@ public class AgentResourceTest extends RandomPortJerseyTest {
bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
bind(PersistedState.class).toInstance(createNiceMock(PersistedState.class));
bind(RoleCommandOrderProvider.class).to(CachedRoleCommandOrderProvider.class);
+ bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementController.class));
}
private void installDependencies() {
@@ -358,4 +360,4 @@ public class AgentResourceTest extends RandomPortJerseyTest {
EasyMock.createMock(StackManagerFactory.class));
}
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
index 608c3ae..5ee3c8a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/AmbariMetaInfoTest.java
@@ -242,7 +242,7 @@ public class AmbariMetaInfoTest {
assertNotNull(redhat6Repo);
for (RepositoryInfo ri : redhat6Repo) {
if (STACK_NAME_HDP.equals(ri.getRepoName())) {
- assertTrue(ri.getBaseUrl().equals(ri.getDefaultBaseUrl()));
+ assertEquals(ri.getBaseUrl(), ri.getDefaultBaseUrl());
}
}
}
@@ -1702,7 +1702,7 @@ public class AmbariMetaInfoTest {
AlertDefinitionDAO dao = injector.getInstance(AlertDefinitionDAO.class);
List<AlertDefinitionEntity> definitions = dao.findAll(clusterId);
- assertEquals(13, definitions.size());
+ assertEquals(12, definitions.size());
// figure out how many of these alerts were merged into from the
// non-stack alerts.json
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/HostEventCreatorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/HostEventCreatorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/HostEventCreatorTest.java
index ae7ba9d..45cd000 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/HostEventCreatorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/HostEventCreatorTest.java
@@ -18,8 +18,6 @@
package org.apache.ambari.server.audit.request.creator;
-import junit.framework.Assert;
-
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -38,6 +36,8 @@ import org.apache.ambari.server.controller.internal.HostResourceProvider;
import org.apache.ambari.server.controller.spi.Resource;
import org.junit.Test;
+import junit.framework.Assert;
+
public class HostEventCreatorTest extends AuditEventCreatorTestBase{
@Test
@@ -45,7 +45,7 @@ public class HostEventCreatorTest extends AuditEventCreatorTestBase{
HostEventCreator creator = new HostEventCreator();
Map<String,Object> properties = new HashMap<>();
- properties.put(HostResourceProvider.HOST_NAME_PROPERTY_ID, "ambari1.example.com");
+ properties.put(HostResourceProvider.HOST_HOST_NAME_PROPERTY_ID, "ambari1.example.com");
Request request = AuditEventCreatorTestHelper.createRequest(Request.Type.POST, Resource.Type.Host, properties, null);
Result result = AuditEventCreatorTestHelper.createResult(new ResultStatus(ResultStatus.STATUS.OK));
@@ -73,7 +73,7 @@ public class HostEventCreatorTest extends AuditEventCreatorTestBase{
properties.put("host_components", set);
- Request request = AuditEventCreatorTestHelper.createRequest(Request.Type.QUERY_POST, Resource.Type.Host, properties, null, HostResourceProvider.HOST_NAME_PROPERTY_ID + "=ambari1.example.com");
+ Request request = AuditEventCreatorTestHelper.createRequest(Request.Type.QUERY_POST, Resource.Type.Host, properties, null, HostResourceProvider.HOST_HOST_NAME_PROPERTY_ID + "=ambari1.example.com");
Result result = AuditEventCreatorTestHelper.createResult(new ResultStatus(ResultStatus.STATUS.OK));
AuditEvent event = AuditEventCreatorTestHelper.getEvent(creator, request, result);
@@ -106,4 +106,4 @@ public class HostEventCreatorTest extends AuditEventCreatorTestBase{
Assert.assertTrue(actual.contains(userName));
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
index 1af8321..a60b696 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelperTest.java
@@ -695,7 +695,7 @@ public class AmbariCustomCommandExecutionHelperTest {
RepositoryVersionEntity repositoryVersion) throws AmbariException, AuthorizationException {
ServiceRequest r1 = new ServiceRequest(clusterName, serviceName,
- repositoryVersion.getStackId().getStackId(), repositoryVersion.getVersion(), null, "false");
+ repositoryVersion.getId(), null, "false");
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r1);
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
index 4024f05..c80620f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerImplTest.java
@@ -556,19 +556,14 @@ public class AmbariManagementControllerImplTest {
@Test
public void testUpdateClusters() throws Exception {
// member state mocks
- Capture<AmbariManagementController> controllerCapture = EasyMock.newCapture();
+ Capture<AmbariManagementController> controllerCapture = new Capture<>();
Injector injector = createStrictMock(Injector.class);
Cluster cluster = createNiceMock(Cluster.class);
ActionManager actionManager = createNiceMock(ActionManager.class);
ClusterRequest clusterRequest = createNiceMock(ClusterRequest.class);
- ConfigurationRequest configurationRequest = createNiceMock(ConfigurationRequest.class);
// requests
- Set<ClusterRequest> setRequests = new HashSet<ClusterRequest>();
- setRequests.add(clusterRequest);
-
- List<ConfigurationRequest> configRequests = new ArrayList<>();
- configRequests.add(configurationRequest);
+ Set<ClusterRequest> setRequests = Collections.singleton(clusterRequest);
KerberosHelper kerberosHelper = createStrictMock(KerberosHelper.class);
// expectations
@@ -578,22 +573,17 @@ public class AmbariManagementControllerImplTest {
expect(injector.getInstance(KerberosHelper.class)).andReturn(kerberosHelper);
expect(clusterRequest.getClusterName()).andReturn("clusterNew").times(3);
expect(clusterRequest.getClusterId()).andReturn(1L).times(6);
- expect(clusterRequest.getDesiredConfig()).andReturn(configRequests);
- expect(configurationRequest.getVersionTag()).andReturn(null).times(1);
expect(clusters.getClusterById(1L)).andReturn(cluster).times(2);
expect(cluster.getClusterName()).andReturn("clusterOld").times(1);
- cluster.addSessionAttributes(EasyMock.<Map<String, Object>>anyObject());
+ cluster.addSessionAttributes(anyObject(Map.class));
expectLastCall().once();
cluster.setClusterName("clusterNew");
expectLastCall();
- configurationRequest.setVersionTag(EasyMock.anyObject(String.class));
- expectLastCall();
-
// replay mocks
- replay(actionManager, cluster, clusters, injector, clusterRequest, sessionManager, configurationRequest);
+ replay(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
// test
AmbariManagementController controller = new AmbariManagementControllerImpl(actionManager, clusters, injector);
@@ -601,9 +591,8 @@ public class AmbariManagementControllerImplTest {
// assert and verify
assertSame(controller, controllerCapture.getValue());
- verify(actionManager, cluster, clusters, injector, clusterRequest, sessionManager, configurationRequest);
+ verify(actionManager, cluster, clusters, injector, clusterRequest, sessionManager);
}
-
/**
* Ensure that processing update request does not fail on configuration
* properties with no value specified (no value = null reference value)
@@ -2034,7 +2023,7 @@ public class AmbariManagementControllerImplTest {
expect(injector.getInstance(Gson.class)).andReturn(null);
expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(maintHelper).anyTimes();
expect(injector.getInstance(KerberosHelper.class)).andReturn(createNiceMock(KerberosHelper.class));
-
+
OsFamily osFamilyMock = createNiceMock(OsFamily.class);
EasyMock.expect(osFamilyMock.isVersionedOsFamilyExtendedByVersionedFamily("testOSFamily", "testOSFamily")).andReturn(true).times(3);
@@ -2229,7 +2218,7 @@ public class AmbariManagementControllerImplTest {
public NestedTestClass(ActionManager actionManager, Clusters clusters, Injector injector, OsFamily osFamilyMock) throws Exception {
super(actionManager, clusters, injector);
- this.osFamily = osFamilyMock;
+ osFamily = osFamilyMock;
}
// public ServiceOsSpecific testPopulateServicePackagesInfo(ServiceInfo serviceInfo, Map<String, String> hostParams,
@@ -2398,7 +2387,6 @@ public class AmbariManagementControllerImplTest {
f.set(controller, configuration);
ClusterRequest cr = new ClusterRequest(null, "c1", "HDP-2.1", null);
- cr.setRepositoryVersion("2.1.1.0-1234");
controller.createCluster(cr);
// verification
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 9b03567..e029d85 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -205,8 +205,6 @@ public class AmbariManagementControllerTest {
private static final String REQUEST_CONTEXT_PROPERTY = "context";
- private static final String CLUSTER_HOST_INFO = "clusterHostInfo";
-
private static AmbariManagementController controller;
private static Clusters clusters;
private ActionDBAccessor actionDB;
@@ -377,7 +375,7 @@ public class AmbariManagementControllerTest {
}
ServiceRequest r1 = new ServiceRequest(clusterName, serviceName,
- repositoryVersion.getStackId().getStackId(), repositoryVersion.getVersion(), dStateStr,
+ repositoryVersion.getId(), dStateStr,
null);
Set<ServiceRequest> requests = new HashSet<>();
@@ -461,7 +459,7 @@ public class AmbariManagementControllerTest {
private long stopService(String clusterName, String serviceName,
boolean runSmokeTests, boolean reconfigureClients) throws
AmbariException, AuthorizationException {
- ServiceRequest r = new ServiceRequest(clusterName, serviceName, null, null, State.INSTALLED.toString(), null);
+ ServiceRequest r = new ServiceRequest(clusterName, serviceName, null, State.INSTALLED.toString(), null);
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
Map<String, String> mapRequestProps = new HashMap<>();
@@ -526,7 +524,7 @@ public class AmbariManagementControllerTest {
boolean runSmokeTests, boolean reconfigureClients,
MaintenanceStateHelper maintenanceStateHelper) throws
AmbariException, AuthorizationException {
- ServiceRequest r = new ServiceRequest(clusterName, serviceName, "HDP-0.2", "0.2-1234",
+ ServiceRequest r = new ServiceRequest(clusterName, serviceName, repositoryVersion02.getId(),
State.STARTED.toString(), null);
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
@@ -581,7 +579,7 @@ public class AmbariManagementControllerTest {
Map<String, String> mapRequestPropsInput)
throws AmbariException, AuthorizationException {
- ServiceRequest r = new ServiceRequest(clusterName, serviceName, "HDP-0.2", "0.2-1234",
+ ServiceRequest r = new ServiceRequest(clusterName, serviceName, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
Set<ServiceRequest> requests = new HashSet<>();
@@ -710,7 +708,7 @@ public class AmbariManagementControllerTest {
Assert.assertEquals(serviceName, s.getName());
Assert.assertEquals(cluster1, s.getCluster().getClusterName());
- ServiceRequest req = new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", null, null);
+ ServiceRequest req = new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), null, null);
Set<ServiceResponse> r =
ServiceResourceProviderTest.getServices(controller, Collections.singleton(req));
@@ -732,7 +730,7 @@ public class AmbariManagementControllerTest {
try {
set1.clear();
- ServiceRequest rInvalid = new ServiceRequest(null, null, null, null, null, null);
+ ServiceRequest rInvalid = new ServiceRequest(null, null, null, null, null);
set1.add(rInvalid);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
fail("Expected failure for invalid requests");
@@ -742,7 +740,7 @@ public class AmbariManagementControllerTest {
try {
set1.clear();
- ServiceRequest rInvalid = new ServiceRequest("foo", null, null, null, null, null);
+ ServiceRequest rInvalid = new ServiceRequest("foo", null, null, null, null);
set1.add(rInvalid);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
fail("Expected failure for invalid requests");
@@ -752,7 +750,7 @@ public class AmbariManagementControllerTest {
try {
set1.clear();
- ServiceRequest rInvalid = new ServiceRequest("foo", "bar", null, null, null, null);
+ ServiceRequest rInvalid = new ServiceRequest("foo", "bar", null, null, null);
set1.add(rInvalid);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
fail("Expected failure for invalid cluster");
@@ -770,8 +768,8 @@ public class AmbariManagementControllerTest {
try {
set1.clear();
- ServiceRequest valid1 = new ServiceRequest(cluster1, "HDFS", null, null, null, null);
- ServiceRequest valid2 = new ServiceRequest(cluster1, "HDFS", null, null, null, null);
+ ServiceRequest valid1 = new ServiceRequest(cluster1, "HDFS", null, null, null);
+ ServiceRequest valid2 = new ServiceRequest(cluster1, "HDFS", null, null, null);
set1.add(valid1);
set1.add(valid2);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
@@ -782,7 +780,7 @@ public class AmbariManagementControllerTest {
try {
set1.clear();
- ServiceRequest valid1 = new ServiceRequest(cluster1, "bar", "HDP-0.2", "0.2-1234", State.STARTED.toString(), null);
+ ServiceRequest valid1 = new ServiceRequest(cluster1, "bar", repositoryVersion02.getId(), State.STARTED.toString(), null);
set1.add(valid1);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
fail("Expected failure for invalid service");
@@ -793,8 +791,8 @@ public class AmbariManagementControllerTest {
try {
set1.clear();
- ServiceRequest valid1 = new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", State.STARTED.toString(), null);
- ServiceRequest valid2 = new ServiceRequest(cluster2, "HDFS", "HDP-0.2", "0.2-1234", State.STARTED.toString(), null);
+ ServiceRequest valid1 = new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), State.STARTED.toString(), null);
+ ServiceRequest valid2 = new ServiceRequest(cluster2, "HDFS", repositoryVersion02.getId(), State.STARTED.toString(), null);
set1.add(valid1);
set1.add(valid2);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
@@ -807,14 +805,14 @@ public class AmbariManagementControllerTest {
Assert.assertEquals(0, clusters.getCluster(cluster1).getServices().size());
set1.clear();
- ServiceRequest valid = new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", null, null);
+ ServiceRequest valid = new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), null, null);
set1.add(valid);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
try {
set1.clear();
- ServiceRequest valid1 = new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", State.STARTED.toString(), null);
- ServiceRequest valid2 = new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", State.STARTED.toString(), null);
+ ServiceRequest valid1 = new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), State.STARTED.toString(), null);
+ ServiceRequest valid2 = new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), State.STARTED.toString(), null);
set1.add(valid1);
set1.add(valid2);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
@@ -856,7 +854,7 @@ public class AmbariManagementControllerTest {
String serviceName2 = "MAPREDUCE";
createService(cluster1, serviceName2, State.INIT);
- ServiceRequest r = new ServiceRequest(cluster1, null, null, null, null, null);
+ ServiceRequest r = new ServiceRequest(cluster1, null, null, null, null);
Set<ServiceResponse> response = ServiceResourceProviderTest.getServices(controller, Collections.singleton(r));
Assert.assertEquals(2, response.size());
@@ -876,15 +874,15 @@ public class AmbariManagementControllerTest {
clusters.addCluster(cluster1, new StackId("HDP-0.1"));
- ServiceRequest valid1 = new ServiceRequest(cluster1, "HDFS", "HDP-0.1", "0.1-1234", null, null);
- ServiceRequest valid2 = new ServiceRequest(cluster1, "MAPREDUCE", "HDP-0.1", "0.1-1234", null, null);
+ ServiceRequest valid1 = new ServiceRequest(cluster1, "HDFS", repositoryVersion01.getId(), null, null);
+ ServiceRequest valid2 = new ServiceRequest(cluster1, "MAPREDUCE", repositoryVersion01.getId(), null, null);
set1.add(valid1);
set1.add(valid2);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
try {
- valid1 = new ServiceRequest(cluster1, "PIG", "HDP-0.1", "0.1-1234", null, null);
- valid2 = new ServiceRequest(cluster1, "MAPREDUCE", "HDP-0.1", "0.2-1234", null, null);
+ valid1 = new ServiceRequest(cluster1, "PIG", repositoryVersion01.getId(), null, null);
+ valid2 = new ServiceRequest(cluster1, "MAPREDUCE", 4L, null, null);
set1.add(valid1);
set1.add(valid2);
ServiceResourceProviderTest.createServices(controller, repositoryVersionDAO, set1);
@@ -1805,8 +1803,6 @@ public class AmbariManagementControllerTest {
String host2 = getUniqueName();
- Map<String, String> hostAttributes = null;
-
HostRequest r1 = new HostRequest(host1, null);
r1.toString();
@@ -1966,7 +1962,7 @@ public class AmbariManagementControllerTest {
Config c1 = configFactory.createNew(cluster, "hdfs-site", "v1", properties, propertiesAttributes);
configs.put(c1.getType(), c1);
- ServiceRequest r = new ServiceRequest(cluster1, serviceName, "HDP-0.2", "0.2-1234",
+ ServiceRequest r = new ServiceRequest(cluster1, serviceName, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
Set<ServiceRequest> requests = new HashSet<>();
@@ -1997,7 +1993,6 @@ public class AmbariManagementControllerTest {
String serviceName = "HDFS";
Cluster cluster = clusters.getCluster(cluster1);
- Service s1 = cluster.getService(serviceName);
Map<String, Config> configs = new HashMap<>();
Map<String, String> properties = new HashMap<>();
@@ -2011,7 +2006,7 @@ public class AmbariManagementControllerTest {
properties.put("d", "d1");
Config c2 = configFactory.createNew(cluster, "core-site", "v1", properties, propertiesAttributes);
- Config c3 = configFactory.createNew(cluster, "foo-site", "v1", properties, propertiesAttributes);
+ configFactory.createNew(cluster, "foo-site", "v1", properties, propertiesAttributes);
Map<String, String> mapRequestProps = new HashMap<>();
mapRequestProps.put("context", "Called from a test");
@@ -2019,7 +2014,7 @@ public class AmbariManagementControllerTest {
configs.put(c1.getType(), c1);
configs.put(c2.getType(), c2);
- ServiceRequest r = new ServiceRequest(cluster1, serviceName, "HDP-0.2", "0.2-1234",
+ ServiceRequest r = new ServiceRequest(cluster1, serviceName, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
Set<ServiceRequest> requests = new HashSet<>();
@@ -2128,7 +2123,7 @@ public class AmbariManagementControllerTest {
}
}
- r = new ServiceRequest(cluster1, serviceName, "HDP-0.2", "0.2-1234", State.STARTED.toString(),
+ r = new ServiceRequest(cluster1, serviceName, repositoryVersion02.getId(), State.STARTED.toString(),
null);
requests.clear();
requests.add(r);
@@ -2175,7 +2170,7 @@ public class AmbariManagementControllerTest {
}
}
- r = new ServiceRequest(cluster1, serviceName, "HDP-0.2", "0.2-1234", State.INSTALLED.toString(),
+ r = new ServiceRequest(cluster1, serviceName, repositoryVersion02.getId(), State.INSTALLED.toString(),
null);
requests.clear();
requests.add(r);
@@ -2278,7 +2273,7 @@ public class AmbariManagementControllerTest {
c1.addService(s1);
s1.setDesiredState(State.INSTALLED);
- ServiceRequest r = new ServiceRequest(cluster1, null, null, null, null, null);
+ ServiceRequest r = new ServiceRequest(cluster1, null, null, null, null);
Set<ServiceResponse> resp = ServiceResourceProviderTest.getServices(controller, Collections.singleton(r));
ServiceResponse resp1 = resp.iterator().next();
@@ -2324,7 +2319,7 @@ public class AmbariManagementControllerTest {
s2.setDesiredState(State.INSTALLED);
s4.setDesiredState(State.INSTALLED);
- ServiceRequest r = new ServiceRequest(null, null, null, null, null, null);
+ ServiceRequest r = new ServiceRequest(null, null, null, null, null);
Set<ServiceResponse> resp;
try {
@@ -2334,35 +2329,35 @@ public class AmbariManagementControllerTest {
// Expected
}
- r = new ServiceRequest(c1.getClusterName(), null, null, null, null, null);
+ r = new ServiceRequest(c1.getClusterName(), null, null, null, null);
resp = ServiceResourceProviderTest.getServices(controller, Collections.singleton(r));
Assert.assertEquals(3, resp.size());
- r = new ServiceRequest(c1.getClusterName(), s2.getName(), null, null, null, null);
+ r = new ServiceRequest(c1.getClusterName(), s2.getName(), null, null, null);
resp = ServiceResourceProviderTest.getServices(controller, Collections.singleton(r));
Assert.assertEquals(1, resp.size());
Assert.assertEquals(s2.getName(), resp.iterator().next().getServiceName());
try {
- r = new ServiceRequest(c2.getClusterName(), s1.getName(), null, null, null, null);
+ r = new ServiceRequest(c2.getClusterName(), s1.getName(), null, null, null);
ServiceResourceProviderTest.getServices(controller, Collections.singleton(r));
fail("Expected failure for invalid service");
} catch (Exception e) {
// Expected
}
- r = new ServiceRequest(c1.getClusterName(), null, null, null, "INSTALLED", null);
+ r = new ServiceRequest(c1.getClusterName(), null, null, "INSTALLED", null);
resp = ServiceResourceProviderTest.getServices(controller, Collections.singleton(r));
Assert.assertEquals(2, resp.size());
- r = new ServiceRequest(c2.getClusterName(), null, null, null, "INIT", null);
+ r = new ServiceRequest(c2.getClusterName(), null, null, "INIT", null);
resp = ServiceResourceProviderTest.getServices(controller, Collections.singleton(r));
Assert.assertEquals(1, resp.size());
ServiceRequest r1, r2, r3;
- r1 = new ServiceRequest(c1.getClusterName(), null, null, null, "INSTALLED", null);
- r2 = new ServiceRequest(c2.getClusterName(), null, null, null, "INIT", null);
- r3 = new ServiceRequest(c2.getClusterName(), null, null, null, "INIT", null);
+ r1 = new ServiceRequest(c1.getClusterName(), null, null, "INSTALLED", null);
+ r2 = new ServiceRequest(c2.getClusterName(), null, null, "INIT", null);
+ r3 = new ServiceRequest(c2.getClusterName(), null, null, "INIT", null);
Set<ServiceRequest> reqs = new HashSet<>();
reqs.addAll(Arrays.asList(r1, r2, r3));
@@ -3164,7 +3159,7 @@ public class AmbariManagementControllerTest {
ServiceRequest r;
try {
- r = new ServiceRequest(cluster1, serviceName, "HDP-0.2", "0.2-1234",
+ r = new ServiceRequest(cluster1, serviceName, repositoryVersion02.getId(),
State.INSTALLING.toString(), null);
reqs.clear();
reqs.add(r);
@@ -3174,7 +3169,7 @@ public class AmbariManagementControllerTest {
// Expected
}
- r = new ServiceRequest(cluster1, serviceName, "HDP-0.2", "0.2-1234", State.INSTALLED.toString(),
+ r = new ServiceRequest(cluster1, serviceName, repositoryVersion02.getId(), State.INSTALLED.toString(),
null);
reqs.clear();
reqs.add(r);
@@ -3220,9 +3215,9 @@ public class AmbariManagementControllerTest {
ServiceRequest req1, req2;
try {
reqs.clear();
- req1 = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ req1 = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
- req2 = new ServiceRequest(cluster2, serviceName2, "HDP-0.2", "0.2-1234",
+ req2 = new ServiceRequest(cluster2, serviceName2, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
reqs.add(req1);
reqs.add(req2);
@@ -3234,9 +3229,9 @@ public class AmbariManagementControllerTest {
try {
reqs.clear();
- req1 = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ req1 = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
- req2 = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ req2 = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
reqs.add(req1);
reqs.add(req2);
@@ -3251,9 +3246,9 @@ public class AmbariManagementControllerTest {
try {
reqs.clear();
- req1 = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ req1 = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.INSTALLED.toString(), null);
- req2 = new ServiceRequest(cluster1, serviceName2, "HDP-0.2", "0.2-1234",
+ req2 = new ServiceRequest(cluster1, serviceName2, repositoryVersion02.getId(),
State.STARTED.toString(), null);
reqs.add(req1);
reqs.add(req2);
@@ -3365,7 +3360,7 @@ public class AmbariManagementControllerTest {
ServiceRequest req1, req2;
try {
reqs.clear();
- req1 = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ req1 = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.STARTED.toString(), null);
reqs.add(req1);
ServiceResourceProviderTest.updateServices(controller, reqs, mapRequestProps, true, false);
@@ -3392,7 +3387,7 @@ public class AmbariManagementControllerTest {
try {
reqs.clear();
- req1 = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ req1 = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.STARTED.toString(), null);
reqs.add(req1);
ServiceResourceProviderTest.updateServices(controller, reqs, mapRequestProps, true, false);
@@ -3420,9 +3415,9 @@ public class AmbariManagementControllerTest {
sch5.setState(State.INSTALLED);
reqs.clear();
- req1 = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ req1 = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.STARTED.toString(), null);
- req2 = new ServiceRequest(cluster1, serviceName2, "HDP-0.2", "0.2-1234",
+ req2 = new ServiceRequest(cluster1, serviceName2, repositoryVersion02.getId(),
State.STARTED.toString(), null);
reqs.add(req1);
reqs.add(req2);
@@ -3508,9 +3503,9 @@ public class AmbariManagementControllerTest {
// test no-op
reqs.clear();
- req1 = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ req1 = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.STARTED.toString(), null);
- req2 = new ServiceRequest(cluster1, serviceName2, "HDP-0.2", "0.2-1234",
+ req2 = new ServiceRequest(cluster1, serviceName2, repositoryVersion02.getId(),
State.STARTED.toString(), null);
reqs.add(req1);
reqs.add(req2);
@@ -4681,7 +4676,7 @@ public class AmbariManagementControllerTest {
.getServiceComponentHost(host2));
// Install
- ServiceRequest r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234",
+ ServiceRequest r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(),
State.INSTALLED.toString(), null);
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
@@ -4701,7 +4696,7 @@ public class AmbariManagementControllerTest {
}
// Start
- r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234",
+ r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(),
State.STARTED.toString(), null);
requests.clear();
requests.add(r);
@@ -4793,7 +4788,7 @@ public class AmbariManagementControllerTest {
configVersions.put("typeC", "v2");
configVersions.put("typeE", "v1");
sReqs.clear();
- sReqs.add(new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", null, null));
+ sReqs.add(new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), null, null));
Assert.assertNull(ServiceResourceProviderTest.updateServices(controller, sReqs, mapRequestProps, true, false));
@@ -4942,7 +4937,7 @@ public class AmbariManagementControllerTest {
configVersions.put("typeC", "v2");
configVersions.put("typeE", "v1");
sReqs.clear();
- sReqs.add(new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", null, null));
+ sReqs.add(new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), null, null));
Assert.assertNull(ServiceResourceProviderTest.updateServices(controller, sReqs, mapRequestProps, true, false));
// update configs at SCH level
@@ -5008,7 +5003,7 @@ public class AmbariManagementControllerTest {
host2, null);
// Install
- ServiceRequest r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234",
+ ServiceRequest r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(),
State.INSTALLED.toString());
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
@@ -5108,7 +5103,7 @@ public class AmbariManagementControllerTest {
configVersions.put("core-site", "version1");
configVersions.put("hdfs-site", "version1");
sReqs.clear();
- sReqs.add(new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", null));
+ sReqs.add(new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), null));
Assert.assertNull(ServiceResourceProviderTest.updateServices(controller, sReqs, mapRequestProps, true, false));
// Reconfigure S Level
@@ -5116,7 +5111,7 @@ public class AmbariManagementControllerTest {
configVersions.put("core-site", "version122");
sReqs.clear();
- sReqs.add(new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", null));
+ sReqs.add(new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), null));
Assert.assertNull(ServiceResourceProviderTest.updateServices(controller, sReqs, mapRequestProps, true, false));
entityManager.clear();
@@ -5429,7 +5424,7 @@ public class AmbariManagementControllerTest {
createServiceComponentHost(cluster1, null, componentName1,
host2, null);
- ServiceRequest r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234",
+ ServiceRequest r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(),
State.INSTALLED.toString());
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
@@ -5466,7 +5461,7 @@ public class AmbariManagementControllerTest {
}
}
- r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", State.STARTED.toString());
+ r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), State.STARTED.toString());
requests.clear();
requests.add(r);
@@ -5890,7 +5885,7 @@ public class AmbariManagementControllerTest {
// Start Service
ServiceRequest sr = new ServiceRequest(
- cluster1, serviceName, "HDP-2.0.6", "2.0.6-1234", State.STARTED.name());
+ cluster1, serviceName, repositoryVersion206.getId(), State.STARTED.name());
Set<ServiceRequest> setReqs = new HashSet<>();
setReqs.add(sr);
RequestStatusResponse resp = ServiceResourceProviderTest.updateServices(controller,
@@ -6120,12 +6115,12 @@ public class AmbariManagementControllerTest {
RepositoryVersionEntity repositoryVersion = repositoryVersion206;
ConfigFactory cf = injector.getInstance(ConfigFactory.class);
- Config config1 = cf.createNew(cluster, "global", "version1",
+ cf.createNew(cluster, "global", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
- Config config2 = cf.createNew(cluster, "core-site", "version1",
+ cf.createNew(cluster, "core-site", "version1",
new HashMap<String, String>() {{
put("key1", "value1");
}}, new HashMap<String, Map<String,String>>());
@@ -6493,7 +6488,7 @@ public class AmbariManagementControllerTest {
put("core-site", "version1");
put("hdfs-site", "version1");
}};
- ServiceRequest sr = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", null);
+ ServiceRequest sr = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), null);
ServiceResourceProviderTest.updateServices(controller, Collections.singleton(sr), new HashMap<String,String>(), false, false);
// Install
@@ -6544,7 +6539,7 @@ public class AmbariManagementControllerTest {
- ServiceRequest r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234",
+ ServiceRequest r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(),
State.INSTALLED.toString());
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
@@ -7536,7 +7531,7 @@ public class AmbariManagementControllerTest {
.getServiceComponentHost(host2));
// Install
- ServiceRequest r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", State.INSTALLED.toString());
+ ServiceRequest r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), State.INSTALLED.toString());
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
@@ -7555,7 +7550,7 @@ public class AmbariManagementControllerTest {
}
// Start
- r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", State.STARTED.toString());
+ r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), State.STARTED.toString());
requests.clear();
requests.add(r);
ServiceResourceProviderTest.updateServices(controller, requests, mapRequestProps, true, false);
@@ -7600,7 +7595,7 @@ public class AmbariManagementControllerTest {
}
// Stop all services
- r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", State.INSTALLED.toString());
+ r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), State.INSTALLED.toString());
requests.clear();
requests.add(r);
ServiceResourceProviderTest.updateServices(controller, requests, mapRequestProps, true, false);
@@ -7800,7 +7795,7 @@ public class AmbariManagementControllerTest {
// Install
- ServiceRequest r = new ServiceRequest(cluster1, serviceName, "HDP-0.1", "0.1-1234", State.INSTALLED.toString());
+ ServiceRequest r = new ServiceRequest(cluster1, serviceName, repositoryVersion01.getId(), State.INSTALLED.toString());
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r);
@@ -7898,7 +7893,7 @@ public class AmbariManagementControllerTest {
sch3.setState(State.INSTALLED);
// an UNKOWN failure will throw an exception
- ServiceRequest req = new ServiceRequest(cluster1, serviceName1, "HDP-0.2", "0.2-1234",
+ ServiceRequest req = new ServiceRequest(cluster1, serviceName1, repositoryVersion02.getId(),
State.INSTALLED.toString());
ServiceResourceProviderTest.updateServices(controller, Collections.singleton(req), Collections.<String, String>emptyMap(), true, false);
}
@@ -8425,7 +8420,7 @@ public class AmbariManagementControllerTest {
amc.createCluster(clusterRequest);
Set<ServiceRequest> serviceRequests = new HashSet<>();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", null));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), null));
ServiceResourceProviderTest.createServices(amc, repositoryVersionDAO, serviceRequests);
@@ -8486,9 +8481,9 @@ public class AmbariManagementControllerTest {
HostResourceProviderTest.createHosts(amc, hrs);
Set<ServiceRequest> serviceRequests = new HashSet<>();
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", STACK_ID, "2.0.1-1234", null));
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", STACK_ID, "2.0.1-1234", null));
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", STACK_ID, "2.0.1-1234", null));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", repositoryVersion201.getId(), null));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", repositoryVersion201.getId(), null));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", repositoryVersion201.getId(), null));
ServiceResourceProviderTest.createServices(amc, repositoryVersionDAO, serviceRequests);
@@ -8562,8 +8557,8 @@ public class AmbariManagementControllerTest {
amc.createCluster(clusterRequest);
Set<ServiceRequest> serviceRequests = new HashSet<>();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", null));
- serviceRequests.add(new ServiceRequest(cluster1, "HIVE", "HDP-1.2.0", "1.2.0-1234", null));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), null));
+ serviceRequests.add(new ServiceRequest(cluster1, "HIVE", repositoryVersion120.getId(), null));
ServiceResourceProviderTest.createServices(amc, repositoryVersionDAO, serviceRequests);
@@ -8584,7 +8579,7 @@ public class AmbariManagementControllerTest {
Assert.assertTrue(clusters.getCluster(cluster1).getDesiredConfigs().containsKey("hive-site"));
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", null));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), null));
ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, true, false);
@@ -8614,7 +8609,7 @@ public class AmbariManagementControllerTest {
amc.createHostComponents(componentHostRequests);
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", "INSTALLED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), "INSTALLED"));
ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, true, false);
Cluster cluster = clusters.getCluster(cluster1);
@@ -8681,7 +8676,7 @@ public class AmbariManagementControllerTest {
componentHost.handleEvent(new ServiceComponentHostOpSucceededEvent(componentHost.getServiceComponentName(), componentHost.getHostName(), System.currentTimeMillis()));
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", "STARTED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), "STARTED"));
RequestStatusResponse response = ServiceResourceProviderTest.updateServices(amc, serviceRequests,
mapRequestProps, true, false);
@@ -8743,14 +8738,14 @@ public class AmbariManagementControllerTest {
// ServiceComponentHost remains in disabled after service stop
assertEquals(sch.getServiceComponentName(),"DATANODE");
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", "INSTALLED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), "INSTALLED"));
ServiceResourceProviderTest.updateServices(amc, serviceRequests,
mapRequestProps, true, false);
assertEquals(State.DISABLED, sch.getState());
// ServiceComponentHost remains in disabled after service start
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", "STARTED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), "STARTED"));
ServiceResourceProviderTest.updateServices(amc, serviceRequests,
mapRequestProps, true, false);
assertEquals(State.DISABLED, sch.getState());
@@ -8772,14 +8767,14 @@ public class AmbariManagementControllerTest {
*Test remove service
*/
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", "INSTALLED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), "INSTALLED"));
ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, true, false);
serviceRequests.clear();
serviceRequests.add(new ServiceRequest(cluster1, null, null, null, null));
org.junit.Assert.assertEquals(2, ServiceResourceProviderTest.getServices(amc, serviceRequests).size());
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", null));
- serviceRequests.add(new ServiceRequest(cluster1, "HIVE", "HDP-1.2.0", "1.2.0-1234", null));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), null));
+ serviceRequests.add(new ServiceRequest(cluster1, "HIVE", repositoryVersion120.getId(), null));
ServiceResourceProviderTest.deleteServices(amc, serviceRequests);
serviceRequests.clear();
serviceRequests.add(new ServiceRequest(cluster1, null, null, null, null));
@@ -8789,7 +8784,7 @@ public class AmbariManagementControllerTest {
*Test add service again
*/
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", null));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), null));
ServiceResourceProviderTest.createServices(amc, repositoryVersionDAO, serviceRequests);
@@ -8806,7 +8801,7 @@ public class AmbariManagementControllerTest {
amc.createConfiguration(configurationRequest);
//Add configs to service
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-1.2.0", "1.2.0-1234", null));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion120.getId(), null));
ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, true, false);
//Crate service components
serviceComponentRequests = new HashSet<>();
@@ -8861,9 +8856,9 @@ public class AmbariManagementControllerTest {
amc.createCluster(clusterRequest);
Set<ServiceRequest> serviceRequests = new HashSet<>();
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", "HDP-2.0.1", "2.0.1-1234", null));
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", "HDP-2.0.1", "2.0.1-1234", null));
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", "HDP-2.0.1", "2.0.1-1234", null));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", repositoryVersion201.getId(), null));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", repositoryVersion201.getId(), null));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", repositoryVersion201.getId(), null));
ServiceResourceProviderTest.createServices(amc, repositoryVersionDAO, serviceRequests);
@@ -8894,9 +8889,9 @@ public class AmbariManagementControllerTest {
//Install services
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", "HDP-2.0.1", "2.0.1-1234", State.INSTALLED.name()));
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", "HDP-2.0.1", "2.0.1-1234", State.INSTALLED.name()));
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", "HDP-2.0.1", "2.0.1-1234", State.INSTALLED.name()));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", repositoryVersion201.getId(), State.INSTALLED.name()));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", repositoryVersion201.getId(), State.INSTALLED.name()));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", repositoryVersion201.getId(), State.INSTALLED.name()));
ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, true, false);
@@ -8918,9 +8913,9 @@ public class AmbariManagementControllerTest {
//Start services
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", "HDP-2.0.1", "2.0.1-1234", State.STARTED.name()));
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", "HDP-2.0.1", "2.0.1-1234", State.STARTED.name()));
- serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", "HDP-2.0.1", "2.0.1-1234", State.STARTED.name()));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "HDFS", repositoryVersion201.getId(), State.STARTED.name()));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "MAPREDUCE2", repositoryVersion201.getId(), State.STARTED.name()));
+ serviceRequests.add(new ServiceRequest(CLUSTER_NAME, "YARN", repositoryVersion201.getId(), State.STARTED.name()));
RequestStatusResponse response = ServiceResourceProviderTest.updateServices(amc, serviceRequests,
mapRequestProps, true, false);
@@ -9102,7 +9097,7 @@ public class AmbariManagementControllerTest {
//Stopping HDFS service
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", "INSTALLED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), "INSTALLED"));
ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, false,
false);
@@ -9110,7 +9105,7 @@ public class AmbariManagementControllerTest {
// test(HDFS_SERVICE_CHECK) won't run
boolean runSmokeTest = false;
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", "STARTED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), "STARTED"));
response = ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps,
runSmokeTest, false);
@@ -9125,7 +9120,7 @@ public class AmbariManagementControllerTest {
//Stopping HDFS service
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", "INSTALLED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), "INSTALLED"));
ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps, false,
false);
@@ -9133,7 +9128,7 @@ public class AmbariManagementControllerTest {
//run_smoke_test flag is set, smoke test will be run
runSmokeTest = true;
serviceRequests.clear();
- serviceRequests.add(new ServiceRequest(cluster1, "HDFS", "HDP-0.2", "0.2-1234", "STARTED"));
+ serviceRequests.add(new ServiceRequest(cluster1, "HDFS", repositoryVersion02.getId(), "STARTED"));
response = ServiceResourceProviderTest.updateServices(amc, serviceRequests, mapRequestProps,
runSmokeTest, false);
@@ -9373,7 +9368,7 @@ public class AmbariManagementControllerTest {
MaintenanceStateHelper maintenanceStateHelper = MaintenanceStateHelperTest.getMaintenanceStateHelperInstance(clusters);
// test updating a service
- ServiceRequest sr = new ServiceRequest(cluster1, serviceName, "HDP-1.2.0", "1.2.0-1234", null);
+ ServiceRequest sr = new ServiceRequest(cluster1, serviceName, repositoryVersion120.getId(), null);
sr.setMaintenanceState(MaintenanceState.ON.name());
ServiceResourceProviderTest.updateServices(controller,
Collections.singleton(sr), requestProperties, false, false,
@@ -9590,7 +9585,7 @@ public class AmbariManagementControllerTest {
MaintenanceStateHelperTest.getMaintenanceStateHelperInstance(clusters);
// test updating a service
- ServiceRequest sr = new ServiceRequest(cluster1, service1Name, "HDP-2.2.0", "2.2.0-1234", null);
+ ServiceRequest sr = new ServiceRequest(cluster1, service1Name, repositoryVersion220.getId(), null);
sr.setCredentialStoreEnabled("true");
ServiceResourceProviderTest.updateServices(controller,
@@ -9600,7 +9595,7 @@ public class AmbariManagementControllerTest {
Assert.assertTrue(service1.isCredentialStoreSupported());
Assert.assertFalse(service1.isCredentialStoreRequired());
- ServiceRequest sr2 = new ServiceRequest(cluster1, service2Name, "HDP-2.2.0", "2.2.0-1234", null);
+ ServiceRequest sr2 = new ServiceRequest(cluster1, service2Name, repositoryVersion220.getId(), null);
sr2.setCredentialStoreEnabled("true");
try {
ServiceResourceProviderTest.updateServices(controller,
@@ -9612,7 +9607,7 @@ public class AmbariManagementControllerTest {
"Invalid arguments, cannot enable credential store as it is not supported by the service. Service=STORM"));
}
- ServiceRequest sr3 = new ServiceRequest(cluster1, service3Name, "HDP-2.2.0", "2.2.0-1234", null);
+ ServiceRequest sr3 = new ServiceRequest(cluster1, service3Name, repositoryVersion220.getId(), null);
sr3.setCredentialStoreEnabled("false");
try {
ServiceResourceProviderTest.updateServices(controller,
@@ -9624,7 +9619,7 @@ public class AmbariManagementControllerTest {
"Invalid arguments, cannot disable credential store as it is required by the service. Service=ZOOKEEPER"));
}
- ServiceRequest sr4 = new ServiceRequest(cluster1, service3Name, "HDP-2.2.0", "2.2.0-1234", null);
+ ServiceRequest sr4 = new ServiceRequest(cluster1, service3Name, repositoryVersion220.getId(), null);
sr4.setCredentialStoreSupported("true");
try {
ServiceResourceProviderTest.updateServices(controller,
@@ -9701,8 +9696,8 @@ public class AmbariManagementControllerTest {
service2.setMaintenanceState(MaintenanceState.ON);
Set<ServiceRequest> srs = new HashSet<>();
- srs.add(new ServiceRequest(cluster1, serviceName1, "HDP-0.1", "0.1-1234", State.INSTALLED.name()));
- srs.add(new ServiceRequest(cluster1, serviceName2, "HDP-0.1", "0.1-1234", State.INSTALLED.name()));
+ srs.add(new ServiceRequest(cluster1, serviceName1, repositoryVersion01.getId(), State.INSTALLED.name()));
+ srs.add(new ServiceRequest(cluster1, serviceName2, repositoryVersion01.getId(), State.INSTALLED.name()));
RequestStatusResponse rsr = ServiceResourceProviderTest.updateServices(controller, srs,
requestProperties, false, false, maintenanceStateHelper);
@@ -9735,8 +9730,8 @@ public class AmbariManagementControllerTest {
h1.setMaintenanceState(cluster.getClusterId(), MaintenanceState.ON);
srs = new HashSet<>();
- srs.add(new ServiceRequest(cluster1, serviceName1, "HDP-0.1", "0.1-1234", State.INSTALLED.name()));
- srs.add(new ServiceRequest(cluster1, serviceName2, "HDP-0.1", "0.1-1234", State.INSTALLED.name()));
+ srs.add(new ServiceRequest(cluster1, serviceName1, repositoryVersion01.getId(), State.INSTALLED.name()));
+ srs.add(new ServiceRequest(cluster1, serviceName2, repositoryVersion01.getId(), State.INSTALLED.name()));
rsr = ServiceResourceProviderTest.updateServices(controller, srs, requestProperties,
false, false, maintenanceStateHelper);
@@ -9750,7 +9745,7 @@ public class AmbariManagementControllerTest {
service2.setMaintenanceState(MaintenanceState.ON);
- ServiceRequest sr = new ServiceRequest(cluster1, serviceName2, "HDP-0.1", "0.1-1234", State.INSTALLED.name());
+ ServiceRequest sr = new ServiceRequest(cluster1, serviceName2, repositoryVersion01.getId(), State.INSTALLED.name());
rsr = ServiceResourceProviderTest.updateServices(controller,
Collections.singleton(sr), requestProperties, false, false, maintenanceStateHelper);
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java
index 8ce5b26..4c237a7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/BackgroundCustomCommandExecutionTest.java
@@ -218,7 +218,7 @@ public class BackgroundCustomCommandExecutionTest {
dStateStr = desiredState.toString();
}
ServiceRequest r1 = new ServiceRequest(clusterName, serviceName,
- m_repositoryVersion.getStackId().getStackId(), m_repositoryVersion.getVersion(), dStateStr);
+ m_repositoryVersion.getId(), m_repositoryVersion.getVersion(), dStateStr);
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r1);
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/ClusterResponseTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/ClusterResponseTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/ClusterResponseTest.java
index 270086f..35a1af3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/ClusterResponseTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/ClusterResponseTest.java
@@ -1,6 +1,4 @@
-package org.apache.ambari.server.controller;
-
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -17,6 +15,7 @@ package org.apache.ambari.server.controller;
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+package org.apache.ambari.server.controller;
import java.util.HashSet;
import java.util.Set;
@@ -30,7 +29,7 @@ public class ClusterResponseTest {
@Test
public void testBasicGetAndSet() {
- long clusterId = new Long(10);
+ long clusterId = 10L;
String clusterName = "foo";
State provisioningState = State.INSTALLED;
SecurityType securityType = SecurityType.KERBEROS;
@@ -40,12 +39,11 @@ public class ClusterResponseTest {
ClusterResponse r1 =
new ClusterResponse(clusterId, clusterName, provisioningState, securityType,
hostNames, hostNames.size(), "bar", null);
-
+
Assert.assertEquals(clusterId, r1.getClusterId());
Assert.assertEquals(clusterName, r1.getClusterName());
- Assert.assertEquals(provisioningState.name(), r1.getProvisioningState());
- Assert.assertEquals(securityType.name(), r1.getSecurityType());
- Assert.assertArrayEquals(hostNames.toArray(), r1.getHostNames().toArray());
+ Assert.assertEquals(provisioningState, r1.getProvisioningState());
+ Assert.assertEquals(securityType, r1.getSecurityType());
Assert.assertEquals(1, r1.getTotalHosts());
Assert.assertEquals("bar", r1.getDesiredStackVersion());
}
@@ -53,7 +51,7 @@ public class ClusterResponseTest {
@Test
public void testToString() {
ClusterResponse r =
- new ClusterResponse(null, null, null, null, null, null, null, null);
+ new ClusterResponse(0, null, null, null, null, 0, null, null);
r.toString();
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java
index d7cbe06..122e4ef 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/RefreshYarnCapacitySchedulerReleaseConfigTest.java
@@ -214,7 +214,7 @@ public class RefreshYarnCapacitySchedulerReleaseConfigTest {
new StackId("HDP-2.0.7"), "2.0.7-1234");
ServiceRequest r1 = new ServiceRequest(clusterName, serviceName,
- repositoryVersion.getStackId().getStackId(), repositoryVersion.getVersion(), dStateStr);
+ repositoryVersion.getId(), repositoryVersion.getVersion(), dStateStr);
Set<ServiceRequest> requests = new HashSet<>();
requests.add(r1);
[3/3] ambari git commit: AMBARI-21450 - Fixing Unit Test Logic From
trunk Merge (jonathanhurley)
Posted by jo...@apache.org.
AMBARI-21450 - Fixing Unit Test Logic From trunk Merge (jonathanhurley)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/15cd3d83
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/15cd3d83
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/15cd3d83
Branch: refs/heads/branch-feature-AMBARI-21450
Commit: 15cd3d8379178006a0ee42b949217fa511351435
Parents: 51e3080
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Mon Jul 24 13:15:01 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Mon Jul 24 13:15:09 2017 -0400
----------------------------------------------------------------------
.../actionmanager/ExecutionCommandWrapper.java | 11 +-
.../ambari/server/agent/HeartBeatHandler.java | 116 ++------
.../request/eventcreator/HostEventCreator.java | 6 +-
.../AmbariManagementControllerImpl.java | 151 ++++------
.../server/controller/ClusterRequest.java | 15 -
.../server/controller/ClusterResponse.java | 102 ++-----
.../server/controller/KerberosHelperImpl.java | 285 ++++++++++---------
.../server/controller/ServiceRequest.java | 24 +-
.../internal/ClusterResourceProvider.java | 33 +--
.../internal/ComponentResourceProvider.java | 10 +-
.../internal/ServiceResourceProvider.java | 104 ++++---
.../orm/entities/ServiceConfigEntity.java | 25 +-
.../upgrades/ComponentVersionCheckAction.java | 2 +-
.../serveraction/upgrades/ConfigureAction.java | 38 ++-
.../ambari/server/topology/AmbariContext.java | 26 +-
.../ExecutionCommandWrapperTest.java | 1 +
.../ambari/server/agent/AgentResourceTest.java | 12 +-
.../server/api/services/AmbariMetaInfoTest.java | 4 +-
.../request/creator/HostEventCreatorTest.java | 10 +-
.../AmbariCustomCommandExecutionHelperTest.java | 2 +-
.../AmbariManagementControllerImplTest.java | 26 +-
.../AmbariManagementControllerTest.java | 221 +++++++-------
.../BackgroundCustomCommandExecutionTest.java | 2 +-
.../server/controller/ClusterResponseTest.java | 18 +-
...hYarnCapacitySchedulerReleaseConfigTest.java | 2 +-
.../internal/ClusterResourceProviderTest.java | 105 ++++---
.../internal/JMXHostProviderTest.java | 3 +-
.../internal/ServiceResourceProviderTest.java | 2 +-
.../ComponentVersionCheckActionTest.java | 4 +-
.../upgrades/ConfigureActionTest.java | 66 +++--
30 files changed, 652 insertions(+), 774 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
index 91db7d0..e4b2540 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
@@ -26,6 +26,7 @@ import java.util.TreeMap;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.ClusterNotFoundException;
+import org.apache.ambari.server.RoleCommand;
import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.agent.AgentCommand.AgentCommandType;
import org.apache.ambari.server.agent.ExecutionCommand;
@@ -224,8 +225,14 @@ public class ExecutionCommandWrapper {
Map<String, String> commandParams = executionCommand.getCommandParams();
if (null != repositoryVersion) {
- commandParams.put(KeyNames.VERSION, repositoryVersion.getVersion());
- executionCommand.getHostLevelParams().put(KeyNames.CURRENT_VERSION, repositoryVersion.getVersion());
+ // only set the version if it's not set and this is NOT an install
+ // command
+ if (!commandParams.containsKey(KeyNames.VERSION)
+ && executionCommand.getRoleCommand() != RoleCommand.INSTALL) {
+ commandParams.put(KeyNames.VERSION, repositoryVersion.getVersion());
+ executionCommand.getHostLevelParams().put(KeyNames.CURRENT_VERSION, repositoryVersion.getVersion());
+
+ }
StackId stackId = repositoryVersion.getStackId();
StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(),
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
index d3ea24b..5d4b338 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartBeatHandler.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -22,9 +22,7 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -33,51 +31,22 @@ import java.util.regex.Pattern;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.HostNotFoundException;
-import org.apache.ambari.server.Role;
-import org.apache.ambari.server.RoleCommand;
-import org.apache.ambari.server.ServiceComponentHostNotFoundException;
-import org.apache.ambari.server.ServiceComponentNotFoundException;
-import org.apache.ambari.server.ServiceNotFoundException;
import org.apache.ambari.server.actionmanager.ActionManager;
-import org.apache.ambari.server.actionmanager.HostRoleCommand;
-import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.MaintenanceStateHelper;
-import org.apache.ambari.server.events.ActionFinalReportReceivedEvent;
-import org.apache.ambari.server.events.AlertEvent;
-import org.apache.ambari.server.events.AlertReceivedEvent;
-import org.apache.ambari.server.events.HostComponentVersionAdvertisedEvent;
-import org.apache.ambari.server.events.publishers.AlertEventPublisher;
-import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
-import org.apache.ambari.server.events.publishers.VersionEventPublisher;
-import org.apache.ambari.server.metadata.ActionMetadata;
-import org.apache.ambari.server.orm.dao.HostDAO;
-import org.apache.ambari.server.orm.dao.KerberosPrincipalHostDAO;
-import org.apache.ambari.server.orm.entities.HostEntity;
import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileReader;
import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileReaderFactory;
import org.apache.ambari.server.serveraction.kerberos.KerberosServerAction;
import org.apache.ambari.server.state.AgentVersion;
-import org.apache.ambari.server.state.Alert;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.ComponentInfo;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.Host;
-import org.apache.ambari.server.state.HostHealthStatus;
-import org.apache.ambari.server.state.HostHealthStatus.HealthStatus;
import org.apache.ambari.server.state.HostState;
-import org.apache.ambari.server.state.MaintenanceState;
-import org.apache.ambari.server.state.SecurityState;
-import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceComponent;
import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackId;
-import org.apache.ambari.server.state.StackInfo;
-import org.apache.ambari.server.state.State;
-import org.apache.ambari.server.state.UpgradeState;
import org.apache.ambari.server.state.alert.AlertDefinition;
import org.apache.ambari.server.state.alert.AlertDefinitionHash;
import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
@@ -85,26 +54,14 @@ import org.apache.ambari.server.state.host.HostHealthyHeartbeatEvent;
import org.apache.ambari.server.state.host.HostRegistrationRequestEvent;
import org.apache.ambari.server.state.host.HostStatusUpdatesReceivedEvent;
import org.apache.ambari.server.state.host.HostUnhealthyHeartbeatEvent;
-import org.apache.ambari.server.state.scheduler.RequestExecution;
-import org.apache.ambari.server.state.stack.upgrade.Direction;
-import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpFailedEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpInProgressEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostOpSucceededEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartedEvent;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStoppedEvent;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.ambari.server.utils.VersionUtils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.gson.Gson;
-import com.google.gson.JsonSyntaxException;
-import com.google.gson.annotations.SerializedName;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Singleton;
@@ -129,9 +86,6 @@ public class HeartBeatHandler {
private HeartbeatProcessor heartbeatProcessor;
@Inject
- private Injector injector;
-
- @Inject
private Configuration config;
@Inject
@@ -152,9 +106,9 @@ public class HeartBeatHandler {
@Inject
private KerberosIdentityDataFileReaderFactory kerberosIdentityDataFileReaderFactory;
- private Map<String, Long> hostResponseIds = new ConcurrentHashMap<String, Long>();
+ private Map<String, Long> hostResponseIds = new ConcurrentHashMap<>();
- private Map<String, HeartBeatResponse> hostResponses = new ConcurrentHashMap<String, HeartBeatResponse>();
+ private Map<String, HeartBeatResponse> hostResponses = new ConcurrentHashMap<>();
@Inject
public HeartBeatHandler(Clusters fsm, ActionQueue aq, ActionManager am,
@@ -201,10 +155,7 @@ public class HeartBeatHandler {
return createRegisterCommand();
}
- LOG.debug("Received heartbeat from host"
- + ", hostname=" + hostname
- + ", currentResponseId=" + currentResponseId
- + ", receivedResponseId=" + heartbeat.getResponseId());
+ LOG.debug("Received heartbeat from host, hostname={}, currentResponseId={}, receivedResponseId={}", hostname, currentResponseId, heartbeat.getResponseId());
if (heartbeat.getResponseId() == currentResponseId - 1) {
HeartBeatResponse heartBeatResponse = hostResponses.get(hostname);
@@ -279,11 +230,10 @@ public class HeartBeatHandler {
return createRegisterCommand();
}
- /**
+ /*
* A host can belong to only one cluster. Though getClustersForHost(hostname)
* returns a set of clusters, it will have only one entry.
*
- *
* TODO: Handle the case when a host is a part of multiple clusters.
*/
Set<Cluster> clusters = clusterFsm.getClustersForHost(hostname);
@@ -296,7 +246,7 @@ public class HeartBeatHandler {
response.setRecoveryConfig(rc);
if (response.getRecoveryConfig() != null) {
- LOG.info("Recovery configuration set to {}", response.getRecoveryConfig().toString());
+ LOG.info("Recovery configuration set to {}", response.getRecoveryConfig());
}
}
}
@@ -315,7 +265,7 @@ public class HeartBeatHandler {
protected void processRecoveryReport(RecoveryReport recoveryReport, String hostname) throws AmbariException {
- LOG.debug("Received recovery report: " + recoveryReport.toString());
+ LOG.debug("Received recovery report: {}", recoveryReport);
Host host = clusterFsm.getHost(hostname);
host.setRecoveryReport(recoveryReport);
}
@@ -330,7 +280,7 @@ public class HeartBeatHandler {
for (AgentCommand ac : cmds) {
try {
if (LOG.isDebugEnabled()) {
- LOG.debug("Sending command string = " + StageUtils.jaxbToString(ac));
+ LOG.debug("Sending command string = {}", StageUtils.jaxbToString(ac));
}
} catch (Exception e) {
throw new AmbariException("Could not get jaxb string for command", e);
@@ -493,10 +443,10 @@ public class HeartBeatHandler {
response.setAgentConfig(config.getAgentConfigsMap());
if(response.getAgentConfig() != null) {
- LOG.debug("Agent configuration map set to " + response.getAgentConfig());
+ LOG.debug("Agent configuration map set to {}", response.getAgentConfig());
}
- /**
+ /*
* A host can belong to only one cluster. Though getClustersForHost(hostname)
* returns a set of clusters, it will have only one entry.
*
@@ -511,7 +461,7 @@ public class HeartBeatHandler {
response.setRecoveryConfig(rc);
if(response.getRecoveryConfig() != null) {
- LOG.info("Recovery configuration set to " + response.getRecoveryConfig().toString());
+ LOG.info("Recovery configuration set to " + response.getRecoveryConfig());
}
}
@@ -542,7 +492,7 @@ public class HeartBeatHandler {
}
if(actionQueue.hasPendingTask(hostname)) {
- LOG.debug("Host " + hostname + " has pending tasks");
+ LOG.debug("Host {} has pending tasks", hostname);
response.setHasPendingTasks(true);
}
}
@@ -558,36 +508,26 @@ public class HeartBeatHandler {
ComponentsResponse response = new ComponentsResponse();
Cluster cluster = clusterFsm.getCluster(clusterName);
- StackId stackId = cluster.getCurrentStackVersion();
- if (stackId == null) {
- throw new AmbariException("Cannot provide stack components map. " +
- "Stack hasn't been selected yet.");
- }
- StackInfo stack = ambariMetaInfo.getStack(stackId.getStackName(),
- stackId.getStackVersion());
- response.setClusterName(clusterName);
- response.setStackName(stackId.getStackName());
- response.setStackVersion(stackId.getStackVersion());
- response.setComponents(getComponentsMap(stack));
+ Map<String, Map<String, String>> componentsMap = new HashMap<>();
- return response;
- }
+ for (org.apache.ambari.server.state.Service service : cluster.getServices().values()) {
+ componentsMap.put(service.getName(), new HashMap<String, String>());
- private Map<String, Map<String, String>> getComponentsMap(StackInfo stack) {
- Map<String, Map<String, String>> result = new HashMap<String, Map<String, String>>();
+ for (ServiceComponent component : service.getServiceComponents().values()) {
+ StackId stackId = component.getDesiredStackId();
- for (ServiceInfo service : stack.getServices()) {
- Map<String, String> components = new HashMap<String, String>();
+ ComponentInfo componentInfo = ambariMetaInfo.getComponent(
+ stackId.getStackName(), stackId.getStackVersion(), service.getName(), component.getName());
- for (ComponentInfo component : service.getComponents()) {
- components.put(component.getName(), component.getCategory());
+ componentsMap.get(service.getName()).put(component.getName(), componentInfo.getCategory());
}
-
- result.put(service.getName(), components);
}
- return result;
+ response.setClusterName(clusterName);
+ response.setComponents(componentsMap);
+
+ return response;
}
/**
@@ -606,7 +546,7 @@ public class HeartBeatHandler {
return null;
}
- List<AlertDefinitionCommand> commands = new ArrayList<AlertDefinitionCommand>();
+ List<AlertDefinitionCommand> commands = new ArrayList<>();
// for every cluster this host is a member of, build the command
for (Cluster cluster : hostClusters) {
@@ -662,7 +602,7 @@ public class HeartBeatHandler {
File keytabFile = new File(dataDir + File.separator + hostName + File.separator + sha1Keytab);
if (keytabFile.canRead()) {
- Map<String, String> keytabMap = new HashMap<String, String>();
+ Map<String, String> keytabMap = new HashMap<>();
String principal = record.get(KerberosIdentityDataFileReader.PRINCIPAL);
String isService = record.get(KerberosIdentityDataFileReader.SERVICE);
@@ -690,7 +630,7 @@ public class HeartBeatHandler {
}
}
} else if ("REMOVE_KEYTAB".equalsIgnoreCase(command)) {
- Map<String, String> keytabMap = new HashMap<String, String>();
+ Map<String, String> keytabMap = new HashMap<>();
keytabMap.put(KerberosIdentityDataFileReader.HOSTNAME, hostName);
keytabMap.put(KerberosIdentityDataFileReader.SERVICE, record.get(KerberosIdentityDataFileReader.SERVICE));
@@ -722,4 +662,4 @@ public class HeartBeatHandler {
heartbeatMonitor.shutdown();
heartbeatProcessor.stopAsync();
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/HostEventCreator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/HostEventCreator.java b/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/HostEventCreator.java
index d05fe9d..1abb0d9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/HostEventCreator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/HostEventCreator.java
@@ -59,7 +59,7 @@ public class HostEventCreator implements RequestAuditEventCreator {
/**
* Pattern to retrieve hostname from url
*/
- private static final Pattern HOSTNAME_PATTERN = Pattern.compile(".*" + HostResourceProvider.HOST_NAME_PROPERTY_ID + "\\s*=\\s*([^&\\s]+).*");
+ private static final Pattern HOSTNAME_PATTERN = Pattern.compile(".*" + HostResourceProvider.HOST_HOST_NAME_PROPERTY_ID + "\\s*=\\s*([^&\\s]+).*");
/**
* {@inheritDoc}
@@ -109,7 +109,7 @@ public class HostEventCreator implements RequestAuditEventCreator {
.withResultStatus(result.getStatus())
.withUrl(request.getURI())
.withRemoteIp(request.getRemoteAddress())
- .withHostName(RequestAuditEventCreatorHelper.getNamedProperty(request, HostResourceProvider.HOST_NAME_PROPERTY_ID))
+ .withHostName(RequestAuditEventCreatorHelper.getNamedProperty(request, HostResourceProvider.HOST_HOST_NAME_PROPERTY_ID))
.build();
case QUERY_POST:
return AddComponentToHostRequestAuditEvent.builder()
@@ -157,4 +157,4 @@ public class HostEventCreator implements RequestAuditEventCreator {
}
return null;
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index becf596..16b4917 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -204,6 +204,7 @@ import org.apache.ambari.server.utils.SecretReference;
import org.apache.ambari.server.utils.StageUtils;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.http.client.utils.URIBuilder;
@@ -453,18 +454,6 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
throw new StackAccessException("stackName=" + stackId.getStackName() + ", stackVersion=" + stackId.getStackVersion());
}
- RepositoryVersionEntity versionEntity = null;
-
- if (null != request.getRepositoryVersion()) {
- versionEntity = repositoryVersionDAO.findByStackAndVersion(stackId,
- request.getRepositoryVersion());
-
- if (null == versionEntity) {
- throw new AmbariException(String.format("Tried to create a cluster on version %s, but that version doesn't exist",
- request.getRepositoryVersion()));
- }
- }
-
// FIXME add support for desired configs at cluster level
boolean foundInvalidHosts = false;
@@ -756,22 +745,21 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
@Override
public void registerRackChange(String clusterName) throws AmbariException {
Cluster cluster = clusters.getCluster(clusterName);
- StackId stackId = cluster.getCurrentStackVersion();
- Set<String> rackSensitiveServices =
- ambariMetaInfo.getRackSensitiveServicesNames(stackId.getStackName(), stackId.getStackVersion());
+ for (Service service : cluster.getServices().values()) {
+ ServiceInfo serviceInfo = ambariMetaInfo.getService(service);
+
+ if (!BooleanUtils.toBoolean(serviceInfo.isRestartRequiredAfterRackChange())) {
+ continue;
+ }
- Map<String, Service> services = cluster.getServices();
+ Map<String, ServiceComponent> serviceComponents = service.getServiceComponents();
- for (Service service : services.values()) {
- if(rackSensitiveServices.contains(service.getName())) {
- Map<String, ServiceComponent> serviceComponents = service.getServiceComponents();
- for (ServiceComponent serviceComponent : serviceComponents.values()) {
- Map<String, ServiceComponentHost> schMap = serviceComponent.getServiceComponentHosts();
- for (Entry<String, ServiceComponentHost> sch : schMap.entrySet()) {
- ServiceComponentHost serviceComponentHost = sch.getValue();
- serviceComponentHost.setRestartRequired(true);
- }
+ for (ServiceComponent serviceComponent : serviceComponents.values()) {
+ Map<String, ServiceComponentHost> schMap = serviceComponent.getServiceComponentHosts();
+ for (Entry<String, ServiceComponentHost> sch : schMap.entrySet()) {
+ ServiceComponentHost serviceComponentHost = sch.getValue();
+ serviceComponentHost.setRestartRequired(true);
}
}
}
@@ -1048,10 +1036,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
Set<ClusterResponse> response = new HashSet<>();
if (LOG.isDebugEnabled()) {
- LOG.debug("Received a getClusters request"
- + ", clusterName=" + request.getClusterName()
- + ", clusterId=" + request.getClusterId()
- + ", stackInfo=" + request.getStackVersion());
+ LOG.debug("Received a getClusters request, clusterName={}, clusterId={}, stackInfo={}",
+ request.getClusterName(), request.getClusterId(), request.getStackVersion());
}
Cluster singleCluster = null;
@@ -1159,20 +1145,13 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
}
if (request.getComponentName() != null) {
- if (request.getServiceName() == null
- || request.getServiceName().isEmpty()) {
- StackId stackId = cluster.getDesiredStackVersion();
- String serviceName =
- ambariMetaInfo.getComponentToService(stackId.getStackName(),
- stackId.getStackVersion(), request.getComponentName());
- if (LOG.isDebugEnabled()) {
- LOG.debug("Looking up service name for component"
- + ", componentName=" + request.getComponentName()
- + ", serviceName=" + serviceName
- + ", stackInfo=" + stackId.getStackId());
- }
- if (serviceName == null
- || serviceName.isEmpty()) {
+ if (StringUtils.isBlank(request.getServiceName())) {
+
+ // !!! FIXME the assumption that a component is unique across all stacks is a ticking
+ // time bomb. Blueprints are making this assumption.
+ String serviceName = findServiceName(cluster, request.getComponentName());
+
+ if (StringUtils.isBlank(serviceName)) {
LOG.error("Unable to find service for component {}", request.getComponentName());
throw new ServiceComponentHostNotFoundException(
cluster.getClusterName(), null, request.getComponentName(), request.getHostname());
@@ -1897,7 +1876,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
}
ClusterResponse clusterResponse =
- new ClusterResponse(cluster.getClusterId(), cluster.getClusterName(), null, null, null, null, null, null);
+ new ClusterResponse(cluster.getClusterId(), cluster.getClusterName(), null, null, null, 0,
+ null, null);
Map<String, Collection<ServiceConfigVersionResponse>> map =
new HashMap<>();
@@ -3468,24 +3448,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
@Override
public String findServiceName(Cluster cluster, String componentName) throws AmbariException {
- StackId stackId = cluster.getDesiredStackVersion();
- String serviceName =
- ambariMetaInfo.getComponentToService(stackId.getStackName(),
- stackId.getStackVersion(), componentName);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Looking up service name for component"
- + ", componentName=" + componentName
- + ", serviceName=" + serviceName);
- }
-
- if (serviceName == null
- || serviceName.isEmpty()) {
- throw new AmbariException("Could not find service for component"
- + ", componentName=" + componentName
- + ", clusterName=" + cluster.getClusterName()
- + ", stackInfo=" + stackId.getStackId());
- }
- return serviceName;
+ return cluster.getServiceByComponentName(componentName).getName();
}
/**
@@ -5256,52 +5219,52 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
@SuppressWarnings("unchecked")
@Override
public void initializeWidgetsAndLayouts(Cluster cluster, Service service) throws AmbariException {
- StackId stackId = cluster.getDesiredStackVersion();
Type widgetLayoutType = new TypeToken<Map<String, List<WidgetLayout>>>(){}.getType();
- try {
- Map<String, Object> widgetDescriptor = null;
- StackInfo stackInfo = ambariMetaInfo.getStack(stackId.getStackName(), stackId.getStackVersion());
- if (service != null) {
- // Service widgets
- ServiceInfo serviceInfo = stackInfo.getService(service.getName());
- File widgetDescriptorFile = serviceInfo.getWidgetsDescriptorFile();
- if (widgetDescriptorFile != null && widgetDescriptorFile.exists()) {
- try {
- widgetDescriptor = gson.fromJson(new FileReader(widgetDescriptorFile), widgetLayoutType);
- } catch (Exception ex) {
- String msg = "Error loading widgets from file: " + widgetDescriptorFile;
- LOG.error(msg, ex);
- throw new AmbariException(msg);
- }
- }
- } else {
- // Cluster level widgets
+ Set<File> widgetDescriptorFiles = new HashSet<>();
+
+ if (null != service) {
+ ServiceInfo serviceInfo = ambariMetaInfo.getService(service);
+ File widgetDescriptorFile = serviceInfo.getWidgetsDescriptorFile();
+ if (widgetDescriptorFile != null && widgetDescriptorFile.exists()) {
+ widgetDescriptorFiles.add(widgetDescriptorFile);
+ }
+ } else {
+ Set<StackId> stackIds = new HashSet<>();
+
+ for (Service svc : cluster.getServices().values()) {
+ stackIds.add(svc.getDesiredStackId());
+ }
+
+ for (StackId stackId : stackIds) {
+ StackInfo stackInfo = ambariMetaInfo.getStack(stackId);
+
String widgetDescriptorFileLocation = stackInfo.getWidgetsDescriptorFileLocation();
if (widgetDescriptorFileLocation != null) {
File widgetDescriptorFile = new File(widgetDescriptorFileLocation);
if (widgetDescriptorFile.exists()) {
- try {
- widgetDescriptor = gson.fromJson(new FileReader(widgetDescriptorFile), widgetLayoutType);
- } catch (Exception ex) {
- String msg = "Error loading widgets from file: " + widgetDescriptorFile;
- LOG.error(msg, ex);
- throw new AmbariException(msg);
- }
+ widgetDescriptorFiles.add(widgetDescriptorFile);
}
}
}
- if (widgetDescriptor != null) {
- LOG.debug("Loaded widget descriptor: " + widgetDescriptor);
+ }
+
+ for (File widgetDescriptorFile : widgetDescriptorFiles) {
+ Map<String, Object> widgetDescriptor = null;
+
+ try {
+ widgetDescriptor = gson.fromJson(new FileReader(widgetDescriptorFile), widgetLayoutType);
+
for (Object artifact : widgetDescriptor.values()) {
List<WidgetLayout> widgetLayouts = (List<WidgetLayout>) artifact;
createWidgetsAndLayouts(cluster, widgetLayouts);
}
+
+ } catch (Exception ex) {
+ String msg = "Error loading widgets from file: " + widgetDescriptorFile;
+ LOG.error(msg, ex);
+ throw new AmbariException(msg);
}
- } catch (Exception e) {
- throw new AmbariException("Error creating stack widget artifacts. " +
- (service != null ? "Service: " + service.getName() + ", " : "") +
- "Cluster: " + cluster.getClusterName(), e);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
index 4d4fd59..aea2072 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterRequest.java
@@ -240,19 +240,4 @@ public class ClusterRequest {
public void setServiceConfigVersionRequest(ServiceConfigVersionRequest serviceConfigVersionRequest) {
this.serviceConfigVersionRequest = serviceConfigVersionRequest;
}
-
- /**
- * @param version the repo version to use
- */
- public void setRepositoryVersion(String version) {
- repositoryVersion = version;
- }
-
- /**
- * @return the repo version to use
- */
- public String getRepositoryVersion() {
- return repositoryVersion;
- }
-
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
index c7577ee..968dbba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ClusterResponse.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -20,6 +20,7 @@ package org.apache.ambari.server.controller;
import java.util.Collection;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import org.apache.ambari.server.state.ClusterHealthReport;
@@ -29,38 +30,23 @@ import org.apache.ambari.server.state.State;
public class ClusterResponse {
- private final Long clusterId;
-
+ private final long clusterId;
private final String clusterName;
-
private final Set<String> hostNames;
-
private final String desiredStackVersion;
+ private final State provisioningState;
+ private final SecurityType securityType;
+ private final int totalHosts;
private Map<String, DesiredConfig> desiredConfigs;
-
private Map<String, Collection<ServiceConfigVersionResponse>> desiredServiceConfigVersions;
-
- private String provisioningState;
-
- /**
- * The cluster's security.
- * <p/>
- * See {@link org.apache.ambari.server.state.SecurityType} for relevant values.
- */
- private String securityType;
-
- private Integer totalHosts;
-
private ClusterHealthReport clusterHealthReport;
+ private Map<String, String> credentialStoreServiceProperties;
- private Map<String, String> credentialStoreServiceProperties = null;
-
- public ClusterResponse(Long clusterId, String clusterName,
- State provisioningState, SecurityType securityType, Set<String> hostNames, Integer totalHosts,
+ public ClusterResponse(long clusterId, String clusterName,
+ State provisioningState, SecurityType securityType, Set<String> hostNames, int totalHosts,
String desiredStackVersion, ClusterHealthReport clusterHealthReport) {
- super();
this.clusterId = clusterId;
this.clusterName = clusterName;
this.hostNames = hostNames;
@@ -69,13 +55,15 @@ public class ClusterResponse {
this.clusterHealthReport = clusterHealthReport;
if (null != provisioningState) {
- this.provisioningState = provisioningState.name();
+ this.provisioningState = provisioningState;
+ } else {
+ this.provisioningState = State.UNKNOWN;
}
if (null == securityType) {
- this.securityType = SecurityType.NONE.name();
+ this.securityType = SecurityType.NONE;
} else {
- this.securityType = securityType.name();
+ this.securityType = securityType;
}
}
@@ -94,19 +82,12 @@ public class ClusterResponse {
}
/**
- * @return the host names
- */
- public Set<String> getHostNames() {
- return hostNames;
- }
-
- /**
* Gets whether the cluster is still initializing or has finished with its
* deployment requests.
*
* @return either {@code INIT} or {@code INSTALLED}, never {@code null}.
*/
- public String getProvisioningState() {
+ public State getProvisioningState() {
return provisioningState;
}
@@ -117,31 +98,19 @@ public class ClusterResponse {
*
* @return the cluster's security type
*/
- public String getSecurityType() {
+ public SecurityType getSecurityType() {
return securityType;
}
- /**
- * Sets the cluster's security type.
- * <p/>
- * See {@link org.apache.ambari.server.state.SecurityType} for relevant values.
- *
- * @param securityType a String declaring the cluster's security type
- */
- public void setSecurityType(String securityType) {
- this.securityType = securityType;
- }
-
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
- sb.append("{"
- + " clusterName=" + clusterName
- + ", clusterId=" + clusterId
- + ", provisioningState=" + provisioningState
- + ", desiredStackVersion=" + desiredStackVersion
- + ", totalHosts=" + totalHosts
- + ", hosts=[");
+ sb.append("{ clusterName=").append(clusterName)
+ .append(", clusterId=").append(clusterId)
+ .append(", provisioningState=").append(provisioningState)
+ .append(", desiredStackVersion=").append(desiredStackVersion)
+ .append(", totalHosts=").append(totalHosts)
+ .append(", hosts=[");
if (hostNames != null) {
int i = 0;
@@ -153,9 +122,7 @@ public class ClusterResponse {
sb.append(hostName);
}
}
- sb.append("]"
- + ", clusterHealthReport= " + clusterHealthReport
- + "}");
+ sb.append("], clusterHealthReport= ").append(clusterHealthReport).append("}");
return sb.toString();
}
@@ -168,25 +135,15 @@ public class ClusterResponse {
return false;
}
- ClusterResponse that = (ClusterResponse) o;
-
- if (clusterId != null ?
- !clusterId.equals(that.clusterId) : that.clusterId != null) {
- return false;
- }
- if (clusterName != null ?
- !clusterName.equals(that.clusterName) : that.clusterName != null) {
- return false;
- }
+ ClusterResponse other = (ClusterResponse) o;
- return true;
+ return Objects.equals(clusterId, other.clusterId) &&
+ Objects.equals(clusterName, other.clusterName);
}
@Override
public int hashCode() {
- int result = clusterId != null ? clusterId.intValue() : 0;
- result = 71 * result + (clusterName != null ? clusterName.hashCode() : 0);
- return result;
+ return Objects.hash(clusterId, clusterName);
}
/**
@@ -196,9 +153,6 @@ public class ClusterResponse {
return desiredStackVersion;
}
- /**
- * @param configs
- */
public void setDesiredConfigs(Map<String, DesiredConfig> configs) {
desiredConfigs = configs;
}
@@ -239,4 +193,4 @@ public class ClusterResponse {
public Map<String, String> getCredentialStoreServiceProperties() {
return credentialStoreServiceProperties;
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
index 3097a07..86ddc5f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/KerberosHelperImpl.java
@@ -60,7 +60,6 @@ import org.apache.ambari.server.orm.entities.ArtifactEntity;
import org.apache.ambari.server.security.credential.Credential;
import org.apache.ambari.server.security.credential.PrincipalKeyCredential;
import org.apache.ambari.server.security.encryption.CredentialStoreService;
-import org.apache.ambari.server.serveraction.ActionLog;
import org.apache.ambari.server.serveraction.ServerAction;
import org.apache.ambari.server.serveraction.kerberos.CleanupServerAction;
import org.apache.ambari.server.serveraction.kerberos.ConfigureAmbariIdentitiesServerAction;
@@ -129,7 +128,7 @@ import com.google.inject.persist.Transactional;
@Singleton
public class KerberosHelperImpl implements KerberosHelper {
- private static final String BASE_LOG_DIR = "/tmp/ambari";
+ public static final String BASE_LOG_DIR = "/tmp/ambari";
private static final Logger LOG = LoggerFactory.getLogger(KerberosHelperImpl.class);
@@ -338,7 +337,8 @@ public class KerberosHelperImpl implements KerberosHelper {
existingConfigurations, installedServices, serviceFilter, previouslyExistingServices, true, true);
for (Map.Entry<String, Map<String, String>> entry : updates.entrySet()) {
- configHelper.updateConfigType(cluster, cluster.getDesiredStackVersion(), ambariManagementController, entry.getKey(), entry.getValue(), null,
+ configHelper.updateConfigType(cluster, cluster.getDesiredStackVersion(),
+ ambariManagementController, entry.getKey(), entry.getValue(), null,
ambariManagementController.getAuthName(), "Enabling Kerberos for added components");
}
}
@@ -420,55 +420,6 @@ public class KerberosHelperImpl implements KerberosHelper {
: kerberosConfigurations;
}
- /**
- * Processes the configuration values related to a particular Kerberos descriptor identity definition
- * by:
- * <ol>
- * <li>
- * merging the declared properties and their values from <code>identityConfigurations</code> with the set of
- * Kerberos-related configuration updates in <code>kerberosConfigurations</code>, using the existing cluster
- * configurations in <code>configurations</code>
- * </li>
- * <li>
- * ensuring that these properties are not overwritten by recommendations by the stack advisor later
- * in the workflow by adding them to the <code>propertiesToIgnore</code> map
- * </li>
- * </ol>
- *
- * @param identityConfigurations a map of config-types to property name/value pairs to process
- * @param kerberosConfigurations a map of config-types to property name/value pairs to be applied
- * as configuration updates
- * @param configurations a map of config-types to property name/value pairs representing
- * the existing configurations for the cluster
- * @param propertiesToIgnore a map of config-types to property names to be ignored while
- * processing stack advisor recommendations
- * @throws AmbariException
- */
- private void processIdentityConfigurations(Map<String, Map<String, String>> identityConfigurations,
- Map<String, Map<String, String>> kerberosConfigurations,
- Map<String, Map<String, String>> configurations,
- Map<String, Set<String>> propertiesToIgnore)
- throws AmbariException {
- if (identityConfigurations != null) {
- for (Map.Entry<String, Map<String, String>> identitiyEntry : identityConfigurations.entrySet()) {
- String configType = identitiyEntry.getKey();
- Map<String, String> properties = identitiyEntry.getValue();
-
- mergeConfigurations(kerberosConfigurations, configType, identitiyEntry.getValue(), configurations);
-
- if ((properties != null) && !properties.isEmpty()) {
- Set<String> propertyNames = propertiesToIgnore.get(configType);
- if (propertyNames == null) {
- propertyNames = new HashSet<>();
- propertiesToIgnore.put(configType, propertyNames);
- }
- propertyNames.addAll(properties.keySet());
- }
- }
- }
-
- }
-
@Override
public Map<String, Map<String, String>> applyStackAdvisorUpdates(Cluster cluster, Set<String> services,
Map<String, Map<String, String>> existingConfigurations,
@@ -477,8 +428,6 @@ public class KerberosHelperImpl implements KerberosHelper {
Map<String, Set<String>> propertiesToRemove,
boolean kerberosEnabled) throws AmbariException {
- StackId stackVersion = cluster.getCurrentStackVersion();
-
List<String> hostNames = new ArrayList<>();
Collection<Host> hosts = cluster.getHosts();
@@ -539,44 +488,58 @@ public class KerberosHelperImpl implements KerberosHelper {
}
}
- StackAdvisorRequest request = StackAdvisorRequest.StackAdvisorRequestBuilder
- .forStack(stackVersion.getStackName(), stackVersion.getStackVersion())
- .forServices(new ArrayList<>(services))
- .forHosts(hostNames)
- .withComponentHostsMap(cluster.getServiceComponentHostMap(null, services))
- .withConfigurations(requestConfigurations)
- .ofType(StackAdvisorRequest.StackAdvisorRequestType.CONFIGURATIONS)
- .build();
+ Set<StackId> visitedStacks = new HashSet<>();
- try {
- RecommendationResponse response = stackAdvisorHelper.recommend(request);
-
- RecommendationResponse.Recommendation recommendation = (response == null) ? null : response.getRecommendations();
- RecommendationResponse.Blueprint blueprint = (recommendation == null) ? null : recommendation.getBlueprint();
- Map<String, RecommendationResponse.BlueprintConfigurations> configurations = (blueprint == null) ? null : blueprint.getConfigurations();
-
- if (configurations != null) {
- for (Map.Entry<String, RecommendationResponse.BlueprintConfigurations> configuration : configurations.entrySet()) {
- String configType = configuration.getKey();
- Map<String, String> recommendedConfigProperties = configuration.getValue().getProperties();
- Map<String, ValueAttributesInfo> recommendedConfigPropertyAttributes = configuration.getValue().getPropertyAttributes();
- Map<String, String> existingConfigProperties = (existingConfigurations == null) ? null : existingConfigurations.get(configType);
- Map<String, String> kerberosConfigProperties = kerberosConfigurations.get(configType);
- Set<String> ignoreProperties = (propertiesToIgnore == null) ? null : propertiesToIgnore.get(configType);
-
- addRecommendedPropertiesForConfigType(kerberosConfigurations, configType, recommendedConfigProperties,
- existingConfigProperties, kerberosConfigProperties, ignoreProperties);
-
- if (recommendedConfigPropertyAttributes != null) {
- removeRecommendedPropertiesForConfigType(configType, recommendedConfigPropertyAttributes,
- existingConfigProperties, kerberosConfigurations, ignoreProperties, propertiesToRemove);
+ for (String serviceName : services) {
+ Service service = cluster.getService(serviceName);
+ StackId stackId = service.getDesiredStackId();
+
+ if (visitedStacks.contains(stackId)) {
+ continue;
+ }
+
+ StackAdvisorRequest request = StackAdvisorRequest.StackAdvisorRequestBuilder
+ .forStack(stackId.getStackName(), stackId.getStackVersion())
+ .forServices(new ArrayList<>(services))
+ .forHosts(hostNames)
+ .withComponentHostsMap(cluster.getServiceComponentHostMap(null, services))
+ .withConfigurations(requestConfigurations)
+ .ofType(StackAdvisorRequest.StackAdvisorRequestType.CONFIGURATIONS)
+ .build();
+
+ try {
+ RecommendationResponse response = stackAdvisorHelper.recommend(request);
+
+ RecommendationResponse.Recommendation recommendation = (response == null) ? null : response.getRecommendations();
+ RecommendationResponse.Blueprint blueprint = (recommendation == null) ? null : recommendation.getBlueprint();
+ Map<String, RecommendationResponse.BlueprintConfigurations> configurations = (blueprint == null) ? null : blueprint.getConfigurations();
+
+ if (configurations != null) {
+ for (Map.Entry<String, RecommendationResponse.BlueprintConfigurations> configuration : configurations.entrySet()) {
+ String configType = configuration.getKey();
+ Map<String, String> recommendedConfigProperties = configuration.getValue().getProperties();
+ Map<String, ValueAttributesInfo> recommendedConfigPropertyAttributes = configuration.getValue().getPropertyAttributes();
+ Map<String, String> existingConfigProperties = (existingConfigurations == null) ? null : existingConfigurations.get(configType);
+ Map<String, String> kerberosConfigProperties = kerberosConfigurations.get(configType);
+ Set<String> ignoreProperties = (propertiesToIgnore == null) ? null : propertiesToIgnore.get(configType);
+
+ addRecommendedPropertiesForConfigType(kerberosConfigurations, configType, recommendedConfigProperties,
+ existingConfigProperties, kerberosConfigProperties, ignoreProperties);
+
+ if (recommendedConfigPropertyAttributes != null) {
+ removeRecommendedPropertiesForConfigType(configType, recommendedConfigPropertyAttributes,
+ existingConfigProperties, kerberosConfigurations, ignoreProperties, propertiesToRemove);
+ }
}
}
+
+ } catch (Exception e) {
+ throw new AmbariException(e.getMessage(), e);
}
- } catch (Exception e) {
- throw new AmbariException(e.getMessage(), e);
+ visitedStacks.add(stackId);
}
+
}
return kerberosConfigurations;
@@ -817,7 +780,7 @@ public class KerberosHelperImpl implements KerberosHelper {
* @param kerberosDetails a KerberosDetails containing information about relevant Kerberos configuration
* @param updateJAASFile true to update Ambari's JAAS file; false otherwise
* @throws AmbariException
- * @see ConfigureAmbariIdentitiesServerAction#configureJAAS(String, String, ActionLog)
+ * @see ConfigureAmbariIdentitiesServerAction#configureJAAS(String, String, org.apache.ambari.server.serveraction.ActionLog)
*/
private void installAmbariIdentity(KerberosIdentityDescriptor ambariServerIdentity,
Keytab keytab, Map<String, Map<String, String>> configurations,
@@ -1754,6 +1717,7 @@ public class KerberosHelperImpl implements KerberosHelper {
// Gather data needed to create stages and tasks...
Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
+
Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster, cluster.getDesiredStackVersion());
String hostParamsJson = StageUtils.getGson().toJson(hostParams);
String ambariServerHostname = StageUtils.getHostName();
@@ -1954,6 +1918,7 @@ public class KerberosHelperImpl implements KerberosHelper {
// Gather data needed to create stages and tasks...
Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(cluster);
String clusterHostInfoJson = StageUtils.getGson().toJson(clusterHostInfo);
+
Map<String, String> hostParams = customCommandExecutionHelper.createDefaultHostParams(cluster, cluster.getDesiredStackVersion());
String hostParamsJson = StageUtils.getGson().toJson(hostParams);
String ambariServerHostname = StageUtils.getHostName();
@@ -2108,43 +2073,6 @@ public class KerberosHelperImpl implements KerberosHelper {
}
}
- /**
- * Creates a temporary file within the system temporary directory
- * <p/>
- * The resulting file is to be removed by the caller when desired.
- *
- * @return a File pointing to the new temporary file, or null if one was not created
- * @throws AmbariException if a new temporary directory cannot be created
- */
- protected File createTemporaryFile() throws AmbariException {
- try {
- return File.createTempFile("tmp", ".tmp", getConfiguredTemporaryDirectory());
- } catch (IOException e) {
- String message = "Failed to create a temporary file.";
- LOG.error(message, e);
- throw new AmbariException(message, e);
- }
- }
-
- /**
- * Gets the configured temporary directory.
- *
- * @return a File pointing to the configured temporary directory
- * @throws IOException
- */
- protected File getConfiguredTemporaryDirectory() throws IOException {
- String tempDirectoryPath = configuration.getServerTempDir();
-
- if (StringUtils.isEmpty(tempDirectoryPath)) {
- tempDirectoryPath = System.getProperty("java.io.tmpdir");
- }
-
- if (tempDirectoryPath == null) {
- throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
- }
-
- return new File(tempDirectoryPath);
- }
/**
* Merges the specified configuration property in a map of configuration types.
@@ -2237,6 +2165,43 @@ public class KerberosHelperImpl implements KerberosHelper {
}
}
+ /**
+ * Creates a temporary file within the system temporary directory
+ * <p/>
+ * The resulting file is to be removed by the caller when desired.
+ *
+ * @return a File pointing to the new temporary file, or null if one was not created
+ * @throws AmbariException if a new temporary directory cannot be created
+ */
+ protected File createTemporaryFile() throws AmbariException {
+ try {
+ return File.createTempFile("tmp", ".tmp", getConfiguredTemporaryDirectory());
+ } catch (IOException e) {
+ String message = "Failed to create a temporary file.";
+ LOG.error(message, e);
+ throw new AmbariException(message, e);
+ }
+ }
+
+ /**
+ * Gets the configured temporary directory.
+ *
+ * @return a File pointing to the configured temporary directory
+ * @throws IOException
+ */
+ protected File getConfiguredTemporaryDirectory() throws IOException {
+ String tempDirectoryPath = configuration.getServerTempDir();
+
+ if (StringUtils.isEmpty(tempDirectoryPath)) {
+ tempDirectoryPath = System.getProperty("java.io.tmpdir");
+ }
+
+ if (tempDirectoryPath == null) {
+ throw new IOException("The System property 'java.io.tmpdir' does not specify a temporary directory");
+ }
+
+ return new File(tempDirectoryPath);
+ }
/**
* Creates a new stage
@@ -2245,7 +2210,6 @@ public class KerberosHelperImpl implements KerberosHelper {
* @param cluster the relevant Cluster
* @param requestId the relevant request Id
* @param requestContext a String describing the stage
- * @param clusterHostInfo JSON-encoded clusterHostInfo structure
* @param commandParams JSON-encoded command parameters
* @param hostParams JSON-encoded host parameters
* @return a newly created Stage
@@ -2273,7 +2237,6 @@ public class KerberosHelperImpl implements KerberosHelper {
* @param cluster the relevant Cluster
* @param requestId the relevant request Id
* @param requestContext a String describing the stage
- * @param clusterHostInfo JSON-encoded clusterHostInfo structure
* @param commandParams JSON-encoded command parameters
* @param hostParams JSON-encoded host parameters
* @param actionClass The ServeAction class that implements the action to invoke
@@ -2429,9 +2392,9 @@ public class KerberosHelperImpl implements KerberosHelper {
* @param componentName the name of a component for which to find results, null indicates all
* components
* @param kerberosDescriptor the relevant Kerberos Descriptor
 * @param filterContext the context to use for filtering identities based on the state of the cluster
 * @return a list of KerberosIdentityDescriptors representing the active identities for the
 * requested service component
 * @throws AmbariException if an error occurs processing the cluster's active identities
*/
private List<KerberosIdentityDescriptor> getActiveIdentities(Cluster cluster,
@@ -2623,7 +2586,18 @@ public class KerberosHelperImpl implements KerberosHelper {
* @throws AmbariException if an error occurs while retrieving the Kerberos descriptor
*/
private KerberosDescriptor getKerberosDescriptorFromStack(Cluster cluster) throws AmbariException {
- StackId stackId = cluster.getCurrentStackVersion();
+ // !!! FIXME in a per-service view, what does this become?
+ Set<StackId> stackIds = new HashSet<>();
+
+ for (Service service : cluster.getServices().values()) {
+ stackIds.add(service.getDesiredStackId());
+ }
+
+ if (1 != stackIds.size()) {
+ throw new AmbariException("Services are deployed from multiple stacks and cannot determine a unique one.");
+ }
+
+ StackId stackId = stackIds.iterator().next();
// -------------------------------
// Get the default Kerberos descriptor from the stack, which is the same as the value from
@@ -2679,6 +2653,55 @@ public class KerberosHelperImpl implements KerberosHelper {
return identitiesToRemove;
}
+ /**
+ * Processes the configuration values related to a particular Kerberos descriptor identity definition
+ * by:
+ * <ol>
+ * <li>
+ * merging the declared properties and their values from <code>identityConfigurations</code> with the set of
+ * Kerberos-related configuration updates in <code>kerberosConfigurations</code>, using the existing cluster
+ * configurations in <code>configurations</code>
+ * </li>
+ * <li>
+ * ensuring that these properties are not overwritten by recommendations by the stack advisor later
+ * in the workflow by adding them to the <code>propertiesToIgnore</code> map
+ * </li>
+ * </ol>
+ *
+ * @param identityConfigurations a map of config-types to property name/value pairs to process
+ * @param kerberosConfigurations a map of config-types to property name/value pairs to be applied
+ * as configuration updates
+ * @param configurations a map of config-types to property name/value pairs representing
+ * the existing configurations for the cluster
+ * @param propertiesToIgnore a map of config-types to property names to be ignored while
+ * processing stack advisor recommendations
+ * @throws AmbariException
+ */
+ private void processIdentityConfigurations(Map<String, Map<String, String>> identityConfigurations,
+ Map<String, Map<String, String>> kerberosConfigurations,
+ Map<String, Map<String, String>> configurations,
+ Map<String, Set<String>> propertiesToIgnore)
+ throws AmbariException {
+ if (identityConfigurations != null) {
+ for (Map.Entry<String, Map<String, String>> identitiyEntry : identityConfigurations.entrySet()) {
+ String configType = identitiyEntry.getKey();
+ Map<String, String> properties = identitiyEntry.getValue();
+
+ mergeConfigurations(kerberosConfigurations, configType, identitiyEntry.getValue(), configurations);
+
+ if ((properties != null) && !properties.isEmpty()) {
+ Set<String> propertyNames = propertiesToIgnore.get(configType);
+ if (propertyNames == null) {
+ propertyNames = new HashSet<>();
+ propertiesToIgnore.put(configType, propertyNames);
+ }
+ propertyNames.addAll(properties.keySet());
+ }
+ }
+ }
+
+ }
+
/* ********************************************************************************************
* Helper classes and enums
* ******************************************************************************************** *\
@@ -2959,7 +2982,7 @@ public class KerberosHelperImpl implements KerberosHelper {
hostParamsJson);
Collection<ServiceComponentHost> filteredComponents = filterServiceComponentHostsForHosts(
- new ArrayList<>(serviceComponentHosts), hostsWithValidKerberosClient);
+ new ArrayList<>(serviceComponentHosts), hostsWithValidKerberosClient);
if (!filteredComponents.isEmpty()) {
List<String> hostsToUpdate = createUniqueHostList(filteredComponents, Collections.singleton(HostState.HEALTHY));
@@ -3100,7 +3123,7 @@ public class KerberosHelperImpl implements KerberosHelper {
hostParamsJson);
Collection<ServiceComponentHost> filteredComponents = filterServiceComponentHostsForHosts(
- new ArrayList<>(serviceComponentHosts), hostsWithValidKerberosClient);
+ new ArrayList<>(serviceComponentHosts), hostsWithValidKerberosClient);
if (!filteredComponents.isEmpty()) {
List<String> hostsToUpdate = createUniqueHostList(filteredComponents, Collections.singleton(HostState.HEALTHY));
@@ -3768,4 +3791,4 @@ public class KerberosHelperImpl implements KerberosHelper {
!"false".equalsIgnoreCase(kerberosEnvProperties.get(CREATE_AMBARI_PRINCIPAL));
}
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java
index 1a6a040..2094a34 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceRequest.java
@@ -28,26 +28,24 @@ public class ServiceRequest {
private String credentialStoreEnabled; // CREATE/UPDATE/GET
private String credentialStoreSupported; //GET
- private String desiredStack;
- private String desiredRepositoryVersion;
+ private Long desiredRepositoryVersionId;
/**
* Short-lived object that gets set while validating a request
*/
private RepositoryVersionEntity resolvedRepository;
- public ServiceRequest(String clusterName, String serviceName, String desiredStack,
- String desiredRepositoryVersion, String desiredState) {
- this(clusterName, serviceName, desiredStack, desiredRepositoryVersion, desiredState, null);
+ public ServiceRequest(String clusterName, String serviceName,
+ Long desiredRepositoryVersionId, String desiredState) {
+ this(clusterName, serviceName, desiredRepositoryVersionId, desiredState, null);
}
- public ServiceRequest(String clusterName, String serviceName, String desiredStack,
- String desiredRepositoryVersion, String desiredState, String credentialStoreEnabled) {
+ public ServiceRequest(String clusterName, String serviceName,
+ Long desiredRepositoryVersionId, String desiredState, String credentialStoreEnabled) {
this.clusterName = clusterName;
this.serviceName = serviceName;
this.desiredState = desiredState;
- this.desiredStack = desiredStack;
- this.desiredRepositoryVersion = desiredRepositoryVersion;
+ this.desiredRepositoryVersionId = desiredRepositoryVersionId;
this.credentialStoreEnabled = credentialStoreEnabled;
// Credential store supported cannot be changed after
@@ -83,12 +81,8 @@ public class ServiceRequest {
this.desiredState = desiredState;
}
- public String getDesiredStack() {
- return desiredStack;
- }
-
- public String getDesiredRepositoryVersion() {
- return desiredRepositoryVersion;
+ public Long getDesiredRepositoryVersionId() {
+ return desiredRepositoryVersionId;
}
/**
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
index 577659d..83df6aa 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterResourceProvider.java
@@ -116,12 +116,12 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
* The cluster primary key properties.
*/
private static Set<String> pkPropertyIds =
- new HashSet<String>(Arrays.asList(new String[]{CLUSTER_ID_PROPERTY_ID}));
+ new HashSet<>(Arrays.asList(new String[]{CLUSTER_ID_PROPERTY_ID}));
/**
* The key property ids for a cluster resource.
*/
- private static Map<Resource.Type, String> keyPropertyIds = new HashMap<Resource.Type, String>();
+ private static Map<Resource.Type, String> keyPropertyIds = new HashMap<>();
static {
keyPropertyIds.put(Resource.Type.Cluster, CLUSTER_NAME_PROPERTY_ID);
}
@@ -129,7 +129,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
/**
* The property ids for a cluster resource.
*/
- private static Set<String> propertyIds = new HashSet<String>();
+ private static Set<String> propertyIds = new HashSet<>();
/**
* Used to serialize to/from json.
@@ -237,7 +237,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
public Set<Resource> getResources(Request request, Predicate predicate)
throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
- final Set<ClusterRequest> requests = new HashSet<ClusterRequest>();
+ final Set<ClusterRequest> requests = new HashSet<>();
if (predicate == null) {
requests.add(getRequest(Collections.<String, Object>emptyMap()));
@@ -257,7 +257,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
}
});
- Set<Resource> resources = new HashSet<Resource>();
+ Set<Resource> resources = new HashSet<>();
if (LOG.isDebugEnabled()) {
LOG.debug("Found clusters matching getClusters request"
+ ", clusterResponseCount=" + responses.size());
@@ -297,7 +297,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
protected RequestStatus updateResourcesAuthorized(final Request request, Predicate predicate)
throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException {
- final Set<ClusterRequest> requests = new HashSet<ClusterRequest>();
+ final Set<ClusterRequest> requests = new HashSet<>();
RequestStatusResponse response;
for (Map<String, Object> requestPropertyMap : request.getProperties()) {
@@ -321,7 +321,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
if (updateResults != null) {
Map<String, Collection<ServiceConfigVersionResponse>> serviceConfigVersions = updateResults.getDesiredServiceConfigVersions();
if (serviceConfigVersions != null) {
- associatedResources = new HashSet<Resource>();
+ associatedResources = new HashSet<>();
for (Collection<ServiceConfigVersionResponse> scvCollection : serviceConfigVersions.values()) {
for (ServiceConfigVersionResponse serviceConfigVersionResponse : scvCollection) {
Resource resource = new ResourceImpl(Resource.Type.ServiceConfigVersion);
@@ -401,9 +401,9 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
private ClusterRequest getRequest(Map<String, Object> properties) {
SecurityType securityType;
String requestedSecurityType = (String) properties.get(CLUSTER_SECURITY_TYPE_PROPERTY_ID);
- if(requestedSecurityType == null)
+ if(requestedSecurityType == null) {
securityType = null;
- else {
+ } else {
try {
securityType = SecurityType.valueOf(requestedSecurityType.toUpperCase());
} catch (IllegalArgumentException e) {
@@ -420,16 +420,13 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
null,
getSessionAttributes(properties));
- if (properties.containsKey(CLUSTER_REPO_VERSION)) {
- cr.setRepositoryVersion(properties.get(CLUSTER_REPO_VERSION).toString());
- }
-
List<ConfigurationRequest> configRequests = getConfigurationRequests("Clusters", properties);
ServiceConfigVersionRequest serviceConfigVersionRequest = getServiceConfigVersionRequest("Clusters", properties);
- if (!configRequests.isEmpty())
+ if (!configRequests.isEmpty()) {
cr.setDesiredConfig(configRequests);
+ }
if (serviceConfigVersionRequest != null) {
cr.setServiceConfigVersionRequest(serviceConfigVersionRequest);
@@ -447,7 +444,7 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
* @return the map of session attributes
*/
private Map<String, Object> getSessionAttributes(Map<String, Object> properties) {
- Map<String, Object> sessionAttributes = new HashMap<String, Object>();
+ Map<String, Object> sessionAttributes = new HashMap<>();
for (Map.Entry<String, Object> entry : properties.entrySet()) {
@@ -475,11 +472,11 @@ public class ClusterResourceProvider extends AbstractControllerResourceProvider
serviceConfigVersionRequest =
(serviceConfigVersionRequest ==null ) ? new ServiceConfigVersionRequest() : serviceConfigVersionRequest;
- if (propName.equals("service_name"))
+ if (propName.equals("service_name")) {
serviceConfigVersionRequest.setServiceName(entry.getValue().toString());
- else if (propName.equals("service_config_version"))
+ } else if (propName.equals("service_config_version")) {
serviceConfigVersionRequest.setVersion(Long.valueOf(entry.getValue().toString()));
- else if (propName.equals("service_config_version_note")) {
+ } else if (propName.equals("service_config_version_note")) {
serviceConfigVersionRequest.setNote(entry.getValue().toString());
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
index 9137799..c02dcb0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ComponentResourceProvider.java
@@ -224,7 +224,9 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
setResourceProperty(resource, COMPONENT_INIT_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("initCount"), requestedIds);
setResourceProperty(resource, COMPONENT_UNKNOWN_COUNT_PROPERTY_ID, response.getServiceComponentStateCount().get("unknownCount"), requestedIds);
setResourceProperty(resource, COMPONENT_RECOVERY_ENABLED_ID, String.valueOf(response.isRecoveryEnabled()), requestedIds);
-
+ setResourceProperty(resource, COMPONENT_DESIRED_STACK, response.getDesiredStackId(), requestedIds);
+ setResourceProperty(resource, COMPONENT_DESIRED_VERSION, response.getDesiredVersion(), requestedIds);
+ setResourceProperty(resource, COMPONENT_REPOSITORY_STATE, response.getRepositoryState(), requestedIds);
resources.add(resource);
}
return resources;
@@ -436,7 +438,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
return response;
}
- // Get the components for the given request.
private Set<ServiceComponentResponse> getComponents(ServiceComponentRequest request) throws AmbariException {
final AmbariMetaInfo ambariMetaInfo = getManagementController().getAmbariMetaInfo();
@@ -446,7 +447,6 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
Set<ServiceComponentResponse> response = new HashSet<>();
String category = null;
- StackId stackId = cluster.getDesiredStackVersion();
if (request.getComponentName() != null) {
setServiceNameIfAbsent(request, cluster, ambariMetaInfo);
@@ -455,6 +455,8 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
ServiceComponent sc = s.getServiceComponent(request.getComponentName());
ServiceComponentResponse serviceComponentResponse = sc.convertToResponse();
+ StackId stackId = sc.getDesiredStackId();
+
try {
ComponentInfo componentInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
stackId.getStackVersion(), s.getName(), request.getComponentName());
@@ -486,6 +488,8 @@ public class ComponentResourceProvider extends AbstractControllerResourceProvide
continue;
}
+ StackId stackId = sc.getDesiredStackId();
+
ServiceComponentResponse serviceComponentResponse = sc.convertToResponse();
try {
ComponentInfo componentInfo = ambariMetaInfo.getComponent(stackId.getStackName(),
http://git-wip-us.apache.org/repos/asf/ambari/blob/15cd3d83/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index dcaaad9..b1f00e6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -74,7 +74,6 @@ import org.apache.ambari.server.state.ServiceComponentHost;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.State;
-import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.Validate;
@@ -110,8 +109,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
public static final String SERVICE_DESIRED_STACK_PROPERTY_ID = PropertyHelper.getPropertyId(
"ServiceInfo", "desired_stack");
- public static final String SERVICE_DESIRED_REPO_VERSION_PROPERTY_ID = PropertyHelper.getPropertyId(
- "ServiceInfo", "desired_repository_version");
+ public static final String SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID = PropertyHelper.getPropertyId(
+ "ServiceInfo", "desired_repository_version_id");
protected static final String SERVICE_REPOSITORY_STATE = "ServiceInfo/repository_state";
@@ -145,7 +144,7 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
PROPERTY_IDS.add(SERVICE_CREDENTIAL_STORE_ENABLED_PROPERTY_ID);
PROPERTY_IDS.add(SERVICE_ATTRIBUTES_PROPERTY_ID);
PROPERTY_IDS.add(SERVICE_DESIRED_STACK_PROPERTY_ID);
- PROPERTY_IDS.add(SERVICE_DESIRED_REPO_VERSION_PROPERTY_ID);
+ PROPERTY_IDS.add(SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID);
PROPERTY_IDS.add(SERVICE_REPOSITORY_STATE);
PROPERTY_IDS.add(QUERY_PARAMETERS_RUN_SMOKE_TEST_ID);
@@ -252,11 +251,17 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
setResourceProperty(resource, SERVICE_CREDENTIAL_STORE_ENABLED_PROPERTY_ID,
String.valueOf(response.isCredentialStoreEnabled()), requestedIds);
- setResourceProperty(resource, SERVICE_DESIRED_STACK_PROPERTY_ID,
- response.getDesiredStackId(), requestedIds);
+ RepositoryVersionEntity repoVersion = repositoryVersionDAO.findByPK(
+ response.getDesiredRepositoryVersionId());
- setResourceProperty(resource, SERVICE_DESIRED_REPO_VERSION_PROPERTY_ID,
- response.getDesiredRepositoryVersion(), requestedIds);
+ // !!! TODO is the UI using this?
+ if (null != repoVersion) {
+ setResourceProperty(resource, SERVICE_DESIRED_STACK_PROPERTY_ID, repoVersion.getStackId(),
+ requestedIds);
+ }
+
+ setResourceProperty(resource, SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID,
+ response.getDesiredRepositoryVersionId(), requestedIds);
setResourceProperty(resource, SERVICE_REPOSITORY_STATE,
response.getRepositoryVersionState(), requestedIds);
@@ -382,13 +387,13 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
* @return the service request object
*/
private ServiceRequest getRequest(Map<String, Object> properties) {
- String desiredStack = (String)properties.get(SERVICE_DESIRED_STACK_PROPERTY_ID);
- String desiredRepositoryVersion = (String)properties.get(SERVICE_DESIRED_REPO_VERSION_PROPERTY_ID);
+
+ String desiredRepoId = (String) properties.get(SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID);
ServiceRequest svcRequest = new ServiceRequest(
(String) properties.get(SERVICE_CLUSTER_NAME_PROPERTY_ID),
(String) properties.get(SERVICE_SERVICE_NAME_PROPERTY_ID),
- desiredStack, desiredRepositoryVersion,
+ null == desiredRepoId ? null : Long.valueOf(desiredRepoId),
(String) properties.get(SERVICE_SERVICE_STATE_PROPERTY_ID),
(String) properties.get(SERVICE_CREDENTIAL_STORE_ENABLED_PROPERTY_ID));
@@ -420,14 +425,10 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
for (ServiceRequest request : requests) {
Cluster cluster = clusters.getCluster(request.getClusterName());
- String desiredStack = request.getDesiredStack();
-
RepositoryVersionEntity repositoryVersion = request.getResolvedRepository();
if (null == repositoryVersion) {
- throw new AmbariException(String.format("Could not find any repositories defined by %s", desiredStack));
- } else {
- desiredStack = repositoryVersion.getStackId().toString();
+ throw new AmbariException("Could not find any repository on the request.");
}
Service s = cluster.addService(request.getServiceName(), repositoryVersion);
@@ -1009,7 +1010,7 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
}
private void validateCreateRequests(Set<ServiceRequest> requests, Clusters clusters)
- throws AuthorizationException, AmbariException {
+ throws AuthorizationException, AmbariException {
AmbariMetaInfo ambariMetaInfo = getManagementController().getAmbariMetaInfo();
Map<String, Set<String>> serviceNames = new HashMap<>();
@@ -1022,10 +1023,12 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
Validate.notEmpty(serviceName, "Service name should be provided when creating a service");
if (LOG.isDebugEnabled()) {
- LOG.debug("Received a createService request, clusterName={}, serviceName={}, request={}", clusterName, serviceName, request);
+ LOG.debug("Received a createService request, clusterName={}, serviceName={}, request={}",
+ clusterName, serviceName, request);
}
- if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(clusterName), RoleAuthorization.SERVICE_ADD_DELETE_SERVICES)) {
+ if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(clusterName),
+ RoleAuthorization.SERVICE_ADD_DELETE_SERVICES)) {
throw new AuthorizationException("The user is not authorized to create services");
}
@@ -1043,8 +1046,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
if (StringUtils.isNotEmpty(request.getDesiredState())) {
State state = State.valueOf(request.getDesiredState());
if (!state.isValidDesiredState() || state != State.INIT) {
- throw new IllegalArgumentException("Invalid desired state"
- + " only INIT state allowed during creation"
+ throw new IllegalArgumentException(
+ "Invalid desired state" + " only INIT state allowed during creation"
+ ", providedDesiredState=" + request.getDesiredState());
}
}
@@ -1053,7 +1056,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
try {
cluster = clusters.getCluster(clusterName);
} catch (ClusterNotFoundException e) {
- throw new ParentObjectNotFoundException("Attempted to add a service to a cluster which doesn't exist", e);
+ throw new ParentObjectNotFoundException(
+ "Attempted to add a service to a cluster which doesn't exist", e);
}
try {
Service s = cluster.getService(serviceName);
@@ -1066,40 +1070,28 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
// Expected
}
- String desiredStack = request.getDesiredStack();
- StackId stackId = new StackId(desiredStack);
-
- String desiredRepositoryVersion = request.getDesiredRepositoryVersion();
- RepositoryVersionEntity repositoryVersion = null;
- if (StringUtils.isNotBlank(desiredRepositoryVersion)){
- repositoryVersion = repositoryVersionDAO.findByVersion(desiredRepositoryVersion);
+ Long desiredRepositoryVersion = request.getDesiredRepositoryVersionId();
+ if (null == desiredRepositoryVersion) {
+ throw new IllegalArgumentException(String.format("%s is required when adding a service.",
+ SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID));
}
- if (null == repositoryVersion) {
- // !!! FIXME hack until the UI always sends the repository
- if (null == desiredStack) {
- desiredStack = cluster.getDesiredStackVersion().toString();
- }
-
- List<RepositoryVersionEntity> allVersions = repositoryVersionDAO.findByStack(new StackId(desiredStack));
-
- if (CollectionUtils.isNotEmpty(allVersions)) {
- repositoryVersion = allVersions.get(0);
- }
- }
+ RepositoryVersionEntity repositoryVersion = repositoryVersionDAO.findByPK(
+ desiredRepositoryVersion);
if (null == repositoryVersion) {
- throw new AmbariException(String.format("Could not find any repositories defined by %s", desiredStack));
- } else {
- stackId = repositoryVersion.getStackId();
+ throw new IllegalArgumentException(String.format(
+ "Could not find any repositories defined by %d", desiredRepositoryVersion));
}
+ StackId stackId = repositoryVersion.getStackId();
+
request.setResolvedRepository(repositoryVersion);
- if (!ambariMetaInfo.isValidService(stackId.getStackName(),
- stackId.getStackVersion(), request.getServiceName())) {
- throw new IllegalArgumentException("Unsupported or invalid service in stack, clusterName=" + clusterName
- + ", serviceName=" + serviceName + ", stackInfo=" + stackId.getStackId());
+ if (!ambariMetaInfo.isValidService(stackId.getStackName(), stackId.getStackVersion(),
+ request.getServiceName())) {
+ throw new IllegalArgumentException("Unsupported or invalid service in stack, clusterName="
+ + clusterName + ", serviceName=" + serviceName + ", stackInfo=" + stackId.getStackId());
}
// validate the credential store input provided
@@ -1109,25 +1101,25 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
if (StringUtils.isNotEmpty(request.getCredentialStoreEnabled())) {
boolean credentialStoreEnabled = Boolean.parseBoolean(request.getCredentialStoreEnabled());
if (!serviceInfo.isCredentialStoreSupported() && credentialStoreEnabled) {
- throw new IllegalArgumentException("Invalid arguments, cannot enable credential store " +
- "as it is not supported by the service. Service=" + request.getServiceName());
- }
+ throw new IllegalArgumentException("Invalid arguments, cannot enable credential store "
+ + "as it is not supported by the service. Service=" + request.getServiceName());
+ }
}
}
// ensure only a single cluster update
if (serviceNames.size() != 1) {
- throw new IllegalArgumentException("Invalid arguments, updates allowed"
- + "on only one cluster at a time");
+ throw new IllegalArgumentException(
+ "Invalid arguments, updates allowed" + "on only one cluster at a time");
}
// Validate dups
if (!duplicates.isEmpty()) {
String clusterName = requests.iterator().next().getClusterName();
- String msg = "Attempted to create a service which already exists: "
- + ", clusterName=" + clusterName + " serviceName=" + StringUtils.join(duplicates, ",");
+ String msg = "Attempted to create a service which already exists: " + ", clusterName="
+ + clusterName + " serviceName=" + StringUtils.join(duplicates, ",");
throw new DuplicateResourceException(msg);
}
  }
}
\ No newline at end of file