Posted to commits@ambari.apache.org by ad...@apache.org on 2018/01/17 19:42:39 UTC

[ambari] branch trunk updated: AMBARI-22726. Fix dependent tests in BlueprintImplTest (#133)

This is an automated email from the ASF dual-hosted git repository.

adoroszlai pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 90b7c14  AMBARI-22726. Fix dependent tests in BlueprintImplTest (#133)
90b7c14 is described below

commit 90b7c1417484ad8c4792b3996603a326ae9618a4
Author: Reed Oei <oe...@gmail.com>
AuthorDate: Wed Jan 17 13:42:37 2018 -0600

    AMBARI-22726. Fix dependent tests in BlueprintImplTest (#133)
---
 .../ambari/server/topology/BlueprintImplTest.java  | 26 +++++++++++-----------
 1 file changed, 13 insertions(+), 13 deletions(-)
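
Context on the change: the fix hoists creation of the serverConfig mock
(via setupConfigurationWithGPLLicense) out of the individual test methods
into the shared @Before setup() method, and adds serverConfig to every
replay()/verify() call. Per the ticket title, several tests had been
order-dependent: they exercised the server configuration without setting
it up themselves, relying on state left behind by earlier tests. A minimal
sketch of the per-test mock lifecycle the commit standardizes on, using
EasyMock and JUnit 4 (LicenseCheck and GplAwareService are hypothetical
stand-ins for illustration, not the actual Ambari types):

    import static org.easymock.EasyMock.createMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;
    import static org.easymock.EasyMock.verify;
    import static org.junit.Assert.assertTrue;

    import org.junit.Before;
    import org.junit.Test;

    public class MockLifecycleTest {

      interface LicenseCheck {            // hypothetical collaborator
        boolean isGplEnabled();
      }

      static class GplAwareService {      // hypothetical class under test
        private final LicenseCheck check;
        GplAwareService(LicenseCheck check) { this.check = check; }
        boolean canUseLzo() { return check.isGplEnabled(); }
      }

      private LicenseCheck licenseCheck;  // shared mock field, like serverConfig

      @Before
      public void setup() {
        // Record default expectations fresh for every test method; JUnit
        // creates a new test instance per @Test, so nothing leaks between tests.
        licenseCheck = createMock(LicenseCheck.class);
        expect(licenseCheck.isGplEnabled()).andReturn(true).anyTimes();
      }

      @Test
      public void gplEnabledAllowsLzo() {
        replay(licenseCheck);   // the test switches its own mock to replay mode
        assertTrue(new GplAwareService(licenseCheck).canUseLzo());
        verify(licenseCheck);   // ... and verifies it itself, never a neighbor's
      }
    }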

diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
index dd0adcc..d34526a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java
@@ -61,9 +61,10 @@ public class BlueprintImplTest {
   Map<String, Map<String, String>> properties = new HashMap<>();
   Map<String, String> hdfsProps = new HashMap<>();
   Configuration configuration = new Configuration(properties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION);
+  org.apache.ambari.server.configuration.Configuration serverConfig;
 
   @Before
-  public void setup() {
+  public void setup() throws NoSuchFieldException, IllegalAccessException {
     properties.put("hdfs-site", hdfsProps);
     hdfsProps.put("foo", "val");
     hdfsProps.put("bar", "val");
@@ -104,11 +105,12 @@ public class BlueprintImplTest {
     requiredService2Properties.add(new Stack.ConfigProperty("category2", "prop2", null));
     expect(stack.getRequiredConfigurationProperties("HDFS")).andReturn(requiredHDFSProperties).anyTimes();
     expect(stack.getRequiredConfigurationProperties("SERVICE2")).andReturn(requiredService2Properties).anyTimes();
+
+    serverConfig = setupConfigurationWithGPLLicense(true);
   }
 
   @Test
   public void testValidateConfigurations__basic_positive() throws Exception {
-    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(true);
     expect(group1.getCardinality()).andReturn("1").atLeastOnce();
     expect(group1.getComponents()).andReturn(Arrays.asList(new Component("c1"), new Component("c2"))).atLeastOnce();
     expect(group2.getCardinality()).andReturn("1").atLeastOnce();
@@ -138,7 +140,6 @@ public class BlueprintImplTest {
     group2Props.put("category2", group2Category2Props);
     group2Category2Props.put("prop2", "val");
 
-    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(true);
     // set config for group2 which contains a required property
     Configuration group2Configuration = new Configuration(group2Props, EMPTY_ATTRIBUTES, configuration);
     expect(group2.getConfiguration()).andReturn(group2Configuration).atLeastOnce();
@@ -178,6 +179,7 @@ public class BlueprintImplTest {
     Configuration group2Configuration = new Configuration(group2Props, EMPTY_ATTRIBUTES, configuration);
     expect(group2.getConfiguration()).andReturn(group2Configuration).atLeastOnce();
 
+
     expect(group1.getCardinality()).andReturn("1").atLeastOnce();
     expect(group1.getComponents()).andReturn(Arrays.asList(new Component("NAMENODE"),new Component("ZKFC"))).atLeastOnce();
     expect(group2.getCardinality()).andReturn("1").atLeastOnce();
@@ -198,13 +200,13 @@ public class BlueprintImplTest {
     properties.put("hadoop-env", hadoopProps);
     hadoopProps.put("dfs_ha_initial_namenode_active", "%HOSTGROUP::group1%");
     hadoopProps.put("dfs_ha_initial_namenode_standby", "%HOSTGROUP::group2%");
-    replay(stack, group1, group2);
+    replay(stack, group1, group2, serverConfig);
 
     Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     blueprint.validateRequiredProperties();
     BlueprintEntity entity = blueprint.toEntity();
 
-    verify(stack, group1, group2);
+    verify(stack, group1, group2, serverConfig);
     assertTrue(entity.getSecurityType() == SecurityType.NONE);
     assertTrue(entity.getSecurityDescriptorReference() == null);
   }
@@ -240,10 +242,10 @@ public class BlueprintImplTest {
     properties.put("hadoop-env", hadoopProps);
     hadoopProps.put("dfs_ha_initial_namenode_active", "%HOSTGROUP::group2%");
     hadoopProps.put("dfs_ha_initial_namenode_standby", "%HOSTGROUP::group3%");
-    replay(stack, group1, group2);
+    replay(stack, group1, group2, serverConfig);
     Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     blueprint.validateRequiredProperties();
-    verify(stack, group1, group2);
+    verify(stack, group1, group2, serverConfig);
   }
   @Test(expected= IllegalArgumentException.class)
   public void testValidateConfigurations__hostGroupConfigForNameNodeHAMappedSameHostGroup() throws Exception {
@@ -276,10 +278,10 @@ public class BlueprintImplTest {
     properties.put("hadoop-env", hadoopProps);
     hadoopProps.put("dfs_ha_initial_namenode_active", "%HOSTGROUP::group2%");
     hadoopProps.put("dfs_ha_initial_namenode_standby", "%HOSTGROUP::group2%");
-    replay(stack, group1, group2);
+    replay(stack, group1, group2, serverConfig);
     Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null);
     blueprint.validateRequiredProperties();
-    verify(stack, group1, group2);
+    verify(stack, group1, group2, serverConfig);
   }
   @Test(expected = InvalidTopologyException.class)
   public void testValidateConfigurations__secretReference() throws InvalidTopologyException,
@@ -287,7 +289,6 @@ public class BlueprintImplTest {
     Map<String, Map<String, String>> group2Props = new HashMap<>();
     Map<String, String> group2Category2Props = new HashMap<>();
 
-    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(true);
     group2Props.put("category2", group2Category2Props);
     group2Category2Props.put("prop2", "val");
     hdfsProps.put("secret", "SECRET:hdfs-site:1:test");
@@ -307,7 +308,7 @@ public class BlueprintImplTest {
     }});
     Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION);
 
-    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(false);
+    serverConfig = setupConfigurationWithGPLLicense(false);
     replay(stack, group1, group2, serverConfig);
 
     Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, lzoUsageConfiguration, null);
@@ -324,7 +325,7 @@ public class BlueprintImplTest {
     }});
     Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION);
 
-    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(false);
+    serverConfig = setupConfigurationWithGPLLicense(false);
     replay(stack, group1, group2, serverConfig);
 
     Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, lzoUsageConfiguration, null);
@@ -342,7 +343,6 @@ public class BlueprintImplTest {
     }});
     Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION);
 
-    org.apache.ambari.server.configuration.Configuration serverConfig = setupConfigurationWithGPLLicense(true);
     expect(group2.getConfiguration()).andReturn(EMPTY_CONFIGURATION).atLeastOnce();
     replay(stack, group1, group2, serverConfig);
 

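A note on the design: tests that need a different configuration, such as the
LZO cases above that require GPL disabled, simply reassign the shared field
(serverConfig = setupConfigurationWithGPLLicense(false)) before calling
replay(), so the default mock created in setup() stays overridable per test.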