You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by mp...@apache.org on 2017/10/11 08:38:40 UTC
[2/7] ambari git commit: AMBARI-22190. After merging trunk to
branch-3.0-perf some parts of code are missing. (mpapirkovskyy)
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
index b875db6..4eb213b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/UpgradeHelperTest.java
@@ -2537,6 +2537,50 @@ public class UpgradeHelperTest extends EasyMockSupport {
}
@Test
+ public void testSequentialServiceChecksWithServiceCheckFailure() throws Exception {
+ Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
+ assertTrue(upgrades.containsKey("upgrade_test_checks"));
+ UpgradePack upgrade = upgrades.get("upgrade_test_checks");
+ assertNotNull(upgrade);
+
+ // !!! fake skippable so we don't affect other tests
+ for (Grouping g : upgrade.getAllGroups()) {
+ if (g.name.equals("SERVICE_CHECK_1") || g.name.equals("SERVICE_CHECK_2")) {
+ g.skippable = true;
+ }
+ }
+
+ Cluster cluster = makeCluster();
+ cluster.deleteService("HDFS", new DeleteHostComponentStatusMetaData());
+ cluster.deleteService("YARN", new DeleteHostComponentStatusMetaData());
+
+ UpgradeContext context = getMockUpgradeContext(cluster, Direction.UPGRADE, UpgradeType.ROLLING, repositoryVersion2110,
+ RepositoryType.STANDARD, cluster.getServices().keySet(), m_masterHostResolver, false);
+ expect(context.isServiceCheckFailureAutoSkipped()).andReturn(Boolean.TRUE).atLeastOnce();
+
+ replay(context);
+
+ List<UpgradeGroupHolder> groups = m_upgradeHelper.createSequence(upgrade, context);
+ assertEquals(5, groups.size());
+
+ UpgradeGroupHolder serviceCheckGroup = groups.get(2);
+ assertEquals(ServiceCheckGrouping.class, serviceCheckGroup.groupClass);
+ assertEquals(4, serviceCheckGroup.items.size());
+
+ StageWrapper wrapper = serviceCheckGroup.items.get(0);
+ assertEquals(ServiceCheckGrouping.ServiceCheckStageWrapper.class, wrapper.getClass());
+ assertTrue(wrapper.getText().contains("ZooKeeper"));
+
+ wrapper = serviceCheckGroup.items.get(serviceCheckGroup.items.size()-1);
+ assertTrue(wrapper.getText().equals("Verifying Skipped Failures"));
+
+ // Do stacks cleanup
+ stackManagerMock.invalidateCurrentPaths();
+ ambariMetaInfo.init();
+ }
+
+
+ @Test
public void testPrematureServiceChecks() throws Exception {
Map<String, UpgradePack> upgrades = ambariMetaInfo.getUpgradePacks("HDP", "2.1.1");
assertTrue(upgrades.containsKey("upgrade_test_checks"));
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
index 09699c6..cbd146b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
@@ -35,9 +35,9 @@ import com.google.gson.reflect.TypeToken;
import junit.framework.Assert;
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
public class KerberosComponentDescriptorTest {
- public static final String JSON_VALUE =
+ static final String JSON_VALUE =
" {" +
" \"name\": \"COMPONENT_NAME\"," +
" \"identities\": [" +
@@ -60,9 +60,9 @@ public class KerberosComponentDescriptorTest {
static {
Map<String, Object> identitiesMap = new TreeMap<>();
- identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE);
- identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_ALT.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE_ALT);
- identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE);
+ identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE.get(KerberosIdentityDescriptor.KEY_NAME), KerberosIdentityDescriptorTest.MAP_VALUE);
+ identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_ALT.get(KerberosIdentityDescriptor.KEY_NAME), KerberosIdentityDescriptorTest.MAP_VALUE_ALT);
+ identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE.get(KerberosIdentityDescriptor.KEY_NAME), KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE);
Map<String, Object> serviceSiteProperties = new TreeMap<>();
serviceSiteProperties.put("service.component.property1", "red");
@@ -78,10 +78,10 @@ public class KerberosComponentDescriptorTest {
authToLocalRules.add("component.name.rules2");
MAP_VALUE = new TreeMap<>();
- MAP_VALUE.put("name", "A_DIFFERENT_COMPONENT_NAME");
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), new ArrayList<>(identitiesMap.values()));
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
+ MAP_VALUE.put(KerberosIdentityDescriptor.KEY_NAME, "A_DIFFERENT_COMPONENT_NAME");
+ MAP_VALUE.put(KerberosComponentDescriptor.KEY_IDENTITIES, new ArrayList<>(identitiesMap.values()));
+ MAP_VALUE.put(KerberosComponentDescriptor.KEY_CONFIGURATIONS, configurationsMap.values());
+ MAP_VALUE.put(KerberosComponentDescriptor.KEY_AUTH_TO_LOCAL_PROPERTIES, authToLocalRules);
}
static void validateFromJSON(KerberosComponentDescriptor componentDescriptor) {
@@ -238,4 +238,4 @@ public class KerberosComponentDescriptorTest {
validateUpdatedData(componentDescriptor);
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
index e891fde..afd6de2 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
@@ -32,7 +32,7 @@ import com.google.gson.reflect.TypeToken;
import junit.framework.Assert;
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
public class KerberosConfigurationDescriptorTest {
private static final String JSON_SINGLE_VALUE =
"{ \"configuration-type\": {" +
@@ -243,4 +243,4 @@ public class KerberosConfigurationDescriptorTest {
Assert.assertEquals("black", properties.get("property1"));
Assert.assertEquals("white", properties.get("property2"));
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
index 7fb5624..cc33512 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
@@ -40,12 +40,12 @@ import com.google.gson.Gson;
import junit.framework.Assert;
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
public class KerberosDescriptorTest {
private static final KerberosDescriptorFactory KERBEROS_DESCRIPTOR_FACTORY = new KerberosDescriptorFactory();
private static final KerberosServiceDescriptorFactory KERBEROS_SERVICE_DESCRIPTOR_FACTORY = new KerberosServiceDescriptorFactory();
- public static final String JSON_VALUE =
+ private static final String JSON_VALUE =
"{" +
" \"properties\": {" +
" \"realm\": \"${cluster-env/kerberos_domain}\"," +
@@ -59,30 +59,30 @@ public class KerberosDescriptorTest {
" ]" +
"}";
- public static final Map<String, Object> MAP_VALUE;
+ private static final Map<String, Object> MAP_VALUE;
static {
Map<String, Object> keytabOwnerMap = new TreeMap<>();
- keytabOwnerMap.put("name", "root");
- keytabOwnerMap.put("access", "rw");
+ keytabOwnerMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "root");
+ keytabOwnerMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "rw");
Map<String, Object> keytabGroupMap = new TreeMap<>();
- keytabGroupMap.put("name", "hadoop");
- keytabGroupMap.put("access", "r");
+ keytabGroupMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "hadoop");
+ keytabGroupMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "r");
Map<String, Object> keytabMap = new TreeMap<>();
- keytabMap.put("file", "/etc/security/keytabs/subject.service.keytab");
- keytabMap.put("owner", keytabOwnerMap);
- keytabMap.put("group", keytabGroupMap);
- keytabMap.put("configuration", "service-site/service2.component.keytab.file");
+ keytabMap.put(KerberosKeytabDescriptor.KEY_FILE, "/etc/security/keytabs/subject.service.keytab");
+ keytabMap.put(KerberosKeytabDescriptor.KEY_OWNER, keytabOwnerMap);
+ keytabMap.put(KerberosKeytabDescriptor.KEY_GROUP, keytabGroupMap);
+ keytabMap.put(KerberosKeytabDescriptor.KEY_CONFIGURATION, "service-site/service2.component.keytab.file");
Map<String, Object> sharedIdentityMap = new TreeMap<>();
- sharedIdentityMap.put("name", "shared");
- sharedIdentityMap.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
- sharedIdentityMap.put("keytab", keytabMap);
+ sharedIdentityMap.put(KerberosIdentityDescriptor.KEY_NAME, "shared");
+ sharedIdentityMap.put(KerberosIdentityDescriptor.KEY_PRINCIPAL, KerberosPrincipalDescriptorTest.MAP_VALUE);
+ sharedIdentityMap.put(KerberosIdentityDescriptor.KEY_KEYTAB, keytabMap);
Map<String, Object> servicesMap = new TreeMap<>();
- servicesMap.put((String) KerberosServiceDescriptorTest.MAP_VALUE.get("name"), KerberosServiceDescriptorTest.MAP_VALUE);
+ servicesMap.put((String) KerberosServiceDescriptorTest.MAP_VALUE.get(KerberosServiceDescriptor.KEY_NAME), KerberosServiceDescriptorTest.MAP_VALUE);
Map<String, Object> identitiesMap = new TreeMap<>();
identitiesMap.put("shared", sharedIdentityMap);
@@ -104,14 +104,14 @@ public class KerberosDescriptorTest {
properties.put("some.property", "Hello World");
MAP_VALUE = new TreeMap<>();
- MAP_VALUE.put("properties", properties);
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.SERVICE.getDescriptorPluralName(), servicesMap.values());
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), identitiesMap.values());
+ MAP_VALUE.put(KerberosDescriptor.KEY_PROPERTIES, properties);
+ MAP_VALUE.put(KerberosDescriptor.KEY_AUTH_TO_LOCAL_PROPERTIES, authToLocalRules);
+ MAP_VALUE.put(KerberosDescriptor.KEY_SERVICES, servicesMap.values());
+ MAP_VALUE.put(KerberosDescriptor.KEY_CONFIGURATIONS, configurationsMap.values());
+ MAP_VALUE.put(KerberosDescriptor.KEY_IDENTITIES, identitiesMap.values());
}
- public static void validateFromJSON(KerberosDescriptor descriptor) {
+ private static void validateFromJSON(KerberosDescriptor descriptor) {
Assert.assertNotNull(descriptor);
Assert.assertTrue(descriptor.isContainer());
@@ -146,7 +146,7 @@ public class KerberosDescriptorTest {
Assert.assertNull(configurations);
}
- public static void validateFromMap(KerberosDescriptor descriptor) throws AmbariException {
+ private static void validateFromMap(KerberosDescriptor descriptor) throws AmbariException {
Assert.assertNotNull(descriptor);
Assert.assertTrue(descriptor.isContainer());
@@ -219,7 +219,7 @@ public class KerberosDescriptorTest {
Assert.assertEquals("red", configProperties.get("property1"));
}
- public void validateUpdatedData(KerberosDescriptor descriptor) {
+ private void validateUpdatedData(KerberosDescriptor descriptor) {
Assert.assertNotNull(descriptor);
Map<String, String> properties = descriptor.getProperties();
@@ -420,7 +420,7 @@ public class KerberosDescriptorTest {
@Test
public void testGetReferencedIdentityDescriptor_Recursive() throws IOException {
- boolean identityFound = false;
+ boolean identityFound;
List<KerberosIdentityDescriptor> identities;
URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_get_referenced_identity_descriptor.json");
@@ -482,8 +482,8 @@ public class KerberosDescriptorTest {
public void testFiltersOutIdentitiesBasedonInstalledServices() throws IOException {
URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_filtering_identity_descriptor.json");
KerberosComponentDescriptor componentDescriptor = KERBEROS_DESCRIPTOR_FACTORY.createInstance(new File(systemResourceURL.getFile()))
- .getService("SERVICE1")
- .getComponent("SERVICE1_COMPONENT1");
+ .getService("SERVICE1")
+ .getComponent("SERVICE1_COMPONENT1");
List<KerberosIdentityDescriptor> identities = componentDescriptor.getIdentities(true, new HashedMap() {{
put("services", Collections.emptySet());
}});
@@ -502,4 +502,4 @@ public class KerberosDescriptorTest {
Assert.assertEquals("service2_component1@${realm}", principalsPerComponent.get("SERVICE2/SERVICE2_COMPONENT1/service2_component1_identity"));
Assert.assertEquals("service1@${realm}", principalsPerComponent.get("SERVICE1/service1_identity"));
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
index 44812de..5faf7d8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
@@ -50,7 +50,7 @@ import com.google.inject.assistedinject.FactoryModuleBuilder;
import junit.framework.Assert;
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
public class KerberosDescriptorUpdateHelperTest extends EasyMockSupport {
private static final KerberosDescriptorFactory KERBEROS_DESCRIPTOR_FACTORY = new KerberosDescriptorFactory();
private static final Gson GSON = new Gson();
@@ -2343,4 +2343,4 @@ public class KerberosDescriptorUpdateHelperTest extends EasyMockSupport {
"}\n").toMap()),
GSON.toJson(newValue.toMap()));
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
index d11962b..5c7075f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
@@ -31,9 +31,9 @@ import com.google.gson.reflect.TypeToken;
import junit.framework.Assert;
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
public class KerberosIdentityDescriptorTest {
- public static final String JSON_VALUE =
+ static final String JSON_VALUE =
"{" +
" \"name\": \"identity_1\"" +
"," +
@@ -50,63 +50,58 @@ public class KerberosIdentityDescriptorTest {
static {
MAP_VALUE = new TreeMap<>();
- MAP_VALUE.put("name", "identity_1");
- MAP_VALUE.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
- MAP_VALUE.put("keytab", KerberosKeytabDescriptorTest.MAP_VALUE);
- MAP_VALUE.put("password", "secret");
+ MAP_VALUE.put(KerberosIdentityDescriptor.KEY_NAME, "identity_1");
+ MAP_VALUE.put(KerberosIdentityDescriptor.KEY_PRINCIPAL, KerberosPrincipalDescriptorTest.MAP_VALUE);
+ MAP_VALUE.put(KerberosIdentityDescriptor.KEY_KEYTAB, KerberosKeytabDescriptorTest.MAP_VALUE);
MAP_VALUE_ALT = new TreeMap<>();
- MAP_VALUE_ALT.put("name", "identity_2");
- MAP_VALUE_ALT.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
- MAP_VALUE_ALT.put("keytab", KerberosKeytabDescriptorTest.MAP_VALUE);
- MAP_VALUE_ALT.put("password", "secret2");
+ MAP_VALUE_ALT.put(KerberosIdentityDescriptor.KEY_NAME, "identity_2");
+ MAP_VALUE_ALT.put(KerberosIdentityDescriptor.KEY_PRINCIPAL, KerberosPrincipalDescriptorTest.MAP_VALUE);
+ MAP_VALUE_ALT.put(KerberosIdentityDescriptor.KEY_KEYTAB, KerberosKeytabDescriptorTest.MAP_VALUE);
TreeMap<String, Object> ownerMap = new TreeMap<>();
- ownerMap.put("name", "me");
- ownerMap.put("access", "rw");
+ ownerMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "me");
+ ownerMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "rw");
TreeMap<String, Object> groupMap = new TreeMap<>();
- groupMap.put("name", "nobody");
- groupMap.put("access", "");
+ groupMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "nobody");
+ groupMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "");
TreeMap<String, Object> keytabMap = new TreeMap<>();
- keytabMap.put("file", "/home/user/me/subject.service.keytab");
- keytabMap.put("owner", ownerMap);
- keytabMap.put("group", groupMap);
- keytabMap.put("configuration", "service-site/me.component.keytab.file");
+ keytabMap.put(KerberosKeytabDescriptor.KEY_FILE, "/home/user/me/subject.service.keytab");
+ keytabMap.put(KerberosKeytabDescriptor.KEY_OWNER, ownerMap);
+ keytabMap.put(KerberosKeytabDescriptor.KEY_GROUP, groupMap);
+ keytabMap.put(KerberosKeytabDescriptor.KEY_CONFIGURATION, "service-site/me.component.keytab.file");
MAP_VALUE_REFERENCE = new TreeMap<>();
- MAP_VALUE_REFERENCE.put("name", "shared_identity");
- MAP_VALUE_REFERENCE.put("reference", "/shared");
- MAP_VALUE_REFERENCE.put("keytab", keytabMap);
+ MAP_VALUE_REFERENCE.put(KerberosIdentityDescriptor.KEY_NAME, "shared_identity");
+ MAP_VALUE_REFERENCE.put(KerberosIdentityDescriptor.KEY_REFERENCE, "/shared");
+ MAP_VALUE_REFERENCE.put(KerberosIdentityDescriptor.KEY_KEYTAB, keytabMap);
}
- public static void validateFromJSON(KerberosIdentityDescriptor identityDescriptor) {
+ static void validateFromJSON(KerberosIdentityDescriptor identityDescriptor) {
Assert.assertNotNull(identityDescriptor);
Assert.assertFalse(identityDescriptor.isContainer());
KerberosPrincipalDescriptorTest.validateFromJSON(identityDescriptor.getPrincipalDescriptor());
KerberosKeytabDescriptorTest.validateFromJSON(identityDescriptor.getKeytabDescriptor());
- Assert.assertNull(identityDescriptor.getPassword());
}
- public static void validateFromMap(KerberosIdentityDescriptor identityDescriptor) {
+ static void validateFromMap(KerberosIdentityDescriptor identityDescriptor) {
Assert.assertNotNull(identityDescriptor);
Assert.assertFalse(identityDescriptor.isContainer());
KerberosPrincipalDescriptorTest.validateFromMap(identityDescriptor.getPrincipalDescriptor());
KerberosKeytabDescriptorTest.validateFromMap(identityDescriptor.getKeytabDescriptor());
- Assert.assertEquals("secret", identityDescriptor.getPassword());
}
- public static void validateUpdatedData(KerberosIdentityDescriptor identityDescriptor) {
+ static void validateUpdatedData(KerberosIdentityDescriptor identityDescriptor) {
Assert.assertNotNull(identityDescriptor);
KerberosPrincipalDescriptorTest.validateUpdatedData(identityDescriptor.getPrincipalDescriptor());
KerberosKeytabDescriptorTest.validateUpdatedData(identityDescriptor.getKeytabDescriptor());
- Assert.assertEquals("secret", identityDescriptor.getPassword());
}
private static KerberosIdentityDescriptor createFromJSON() {
@@ -167,4 +162,4 @@ public class KerberosIdentityDescriptorTest {
context.put("services", new HashSet<>(Arrays.asList("NOT_HIVE", "HDFS", "ZOOKEEPER")));
Assert.assertFalse(identityDescriptor.shouldInclude(context));
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
index 8cdb39e..8bb179d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
@@ -29,9 +29,9 @@ import com.google.gson.reflect.TypeToken;
import junit.framework.Assert;
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
public class KerberosKeytabDescriptorTest {
- public static final String JSON_VALUE =
+ static final String JSON_VALUE =
"{" +
" \"file\": \"/etc/security/keytabs/${host}/subject.service.keytab\"," +
" \"owner\": {" +
@@ -45,25 +45,25 @@ public class KerberosKeytabDescriptorTest {
" \"configuration\": \"service-site/service.component.keytab.file\"" +
"}";
- public static final Map<String, Object> MAP_VALUE;
+ static final Map<String, Object> MAP_VALUE;
static {
TreeMap<String, Object> ownerMap = new TreeMap<>();
- ownerMap.put("name", "root");
- ownerMap.put("access", "rw");
+ ownerMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "root");
+ ownerMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "rw");
TreeMap<String, Object> groupMap = new TreeMap<>();
- groupMap.put("name", "hadoop");
- groupMap.put("access", "r");
+ groupMap.put(KerberosKeytabDescriptor.KEY_ACL_NAME, "hadoop");
+ groupMap.put(KerberosKeytabDescriptor.KEY_ACL_ACCESS, "r");
MAP_VALUE = new TreeMap<>();
- MAP_VALUE.put("file", "/etc/security/keytabs/subject.service.keytab");
- MAP_VALUE.put("owner", ownerMap);
- MAP_VALUE.put("group", groupMap);
- MAP_VALUE.put("configuration", "service-site/service2.component.keytab.file");
+ MAP_VALUE.put(KerberosKeytabDescriptor.KEY_FILE, "/etc/security/keytabs/subject.service.keytab");
+ MAP_VALUE.put(KerberosKeytabDescriptor.KEY_OWNER, ownerMap);
+ MAP_VALUE.put(KerberosKeytabDescriptor.KEY_GROUP, groupMap);
+ MAP_VALUE.put(KerberosKeytabDescriptor.KEY_CONFIGURATION, "service-site/service2.component.keytab.file");
}
- public static void validateFromJSON(KerberosKeytabDescriptor keytabDescriptor) {
+ static void validateFromJSON(KerberosKeytabDescriptor keytabDescriptor) {
Assert.assertNotNull(keytabDescriptor);
Assert.assertFalse(keytabDescriptor.isContainer());
@@ -75,7 +75,7 @@ public class KerberosKeytabDescriptorTest {
Assert.assertEquals("service-site/service.component.keytab.file", keytabDescriptor.getConfiguration());
}
- public static void validateFromMap(KerberosKeytabDescriptor keytabDescriptor) {
+ static void validateFromMap(KerberosKeytabDescriptor keytabDescriptor) {
Assert.assertNotNull(keytabDescriptor);
Assert.assertFalse(keytabDescriptor.isContainer());
@@ -87,7 +87,7 @@ public class KerberosKeytabDescriptorTest {
Assert.assertEquals("service-site/service2.component.keytab.file", keytabDescriptor.getConfiguration());
}
- public static void validateUpdatedData(KerberosKeytabDescriptor keytabDescriptor) {
+ static void validateUpdatedData(KerberosKeytabDescriptor keytabDescriptor) {
Assert.assertNotNull(keytabDescriptor);
Assert.assertEquals("/etc/security/keytabs/subject.service.keytab", keytabDescriptor.getFile());
@@ -146,4 +146,4 @@ public class KerberosKeytabDescriptorTest {
validateUpdatedData(keytabDescriptor);
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
index b84223f..0e30d44 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
@@ -29,9 +29,9 @@ import com.google.gson.reflect.TypeToken;
import junit.framework.Assert;
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
public class KerberosPrincipalDescriptorTest {
- public static final String JSON_VALUE =
+ static final String JSON_VALUE =
"{" +
"\"value\": \"service/_HOST@_REALM\"," +
"\"configuration\": \"service-site/service.component.kerberos.principal\"," +
@@ -39,27 +39,27 @@ public class KerberosPrincipalDescriptorTest {
"\"local_username\": \"localUser\"" +
"}";
- public static final String JSON_VALUE_SPARSE =
+ private static final String JSON_VALUE_SPARSE =
"{" +
"\"value\": \"serviceOther/_HOST@_REALM\"" +
"}";
public static final Map<String, Object> MAP_VALUE;
- public static final Map<String, Object> MAP_VALUE_SPARSE;
+ private static final Map<String, Object> MAP_VALUE_SPARSE;
static {
MAP_VALUE = new TreeMap<>();
- MAP_VALUE.put("value", "user@_REALM");
- MAP_VALUE.put("configuration", "service-site/service.component.kerberos.https.principal");
- MAP_VALUE.put("type", "user");
- MAP_VALUE.put("local_username", null);
+ MAP_VALUE.put(KerberosPrincipalDescriptor.KEY_VALUE, "user@_REALM");
+ MAP_VALUE.put(KerberosPrincipalDescriptor.KEY_CONFIGURATION, "service-site/service.component.kerberos.https.principal");
+ MAP_VALUE.put(KerberosPrincipalDescriptor.KEY_TYPE, "user");
+ MAP_VALUE.put(KerberosPrincipalDescriptor.KEY_LOCAL_USERNAME, null);
MAP_VALUE_SPARSE = new TreeMap<>();
- MAP_VALUE_SPARSE.put("value", "userOther@_REALM");
+ MAP_VALUE_SPARSE.put(KerberosPrincipalDescriptor.KEY_VALUE, "userOther@_REALM");
}
- public static void validateFromJSON(KerberosPrincipalDescriptor principalDescriptor) {
+ static void validateFromJSON(KerberosPrincipalDescriptor principalDescriptor) {
Assert.assertNotNull(principalDescriptor);
Assert.assertFalse(principalDescriptor.isContainer());
Assert.assertEquals("service/_HOST@_REALM", principalDescriptor.getValue());
@@ -68,7 +68,7 @@ public class KerberosPrincipalDescriptorTest {
Assert.assertEquals("localUser", principalDescriptor.getLocalUsername());
}
- public static void validateFromMap(KerberosPrincipalDescriptor principalDescriptor) {
+ static void validateFromMap(KerberosPrincipalDescriptor principalDescriptor) {
Assert.assertNotNull(principalDescriptor);
Assert.assertFalse(principalDescriptor.isContainer());
Assert.assertEquals("user@_REALM", principalDescriptor.getValue());
@@ -77,7 +77,7 @@ public class KerberosPrincipalDescriptorTest {
Assert.assertNull(principalDescriptor.getLocalUsername());
}
- public static void validateUpdatedData(KerberosPrincipalDescriptor principalDescriptor) {
+ static void validateUpdatedData(KerberosPrincipalDescriptor principalDescriptor) {
Assert.assertNotNull(principalDescriptor);
Assert.assertEquals("user@_REALM", principalDescriptor.getValue());
Assert.assertEquals("service-site/service.component.kerberos.https.principal", principalDescriptor.getConfiguration());
@@ -192,4 +192,4 @@ public class KerberosPrincipalDescriptorTest {
Assert.assertEquals(KerberosPrincipalType.USER, principalDescriptor.getType());
Assert.assertNull(principalDescriptor.getLocalUsername());
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
index e4d3c90..17134c0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
@@ -37,9 +37,9 @@ import com.google.gson.Gson;
import junit.framework.Assert;
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
public class KerberosServiceDescriptorTest {
- public static final String JSON_VALUE =
+ static final String JSON_VALUE =
"{" +
" \"name\": \"SERVICE_NAME\"," +
" \"preconfigure\": \"true\"," +
@@ -62,7 +62,7 @@ public class KerberosServiceDescriptorTest {
" ]" +
"}";
- public static final String JSON_VALUE_SERVICES =
+ private static final String JSON_VALUE_SERVICES =
"{ " +
"\"services\" : [" +
"{" +
@@ -130,22 +130,22 @@ public class KerberosServiceDescriptorTest {
MAP_VALUE = new TreeMap<>();
MAP_VALUE.put("name", "A_DIFFERENT_SERVICE_NAME");
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), identitiesMap.values());
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.COMPONENT.getDescriptorPluralName(), componentsMap.values());
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
- MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
+ MAP_VALUE.put(KerberosServiceDescriptor.KEY_IDENTITIES, identitiesMap.values());
+ MAP_VALUE.put(KerberosServiceDescriptor.KEY_COMPONENTS, componentsMap.values());
+ MAP_VALUE.put(KerberosServiceDescriptor.KEY_CONFIGURATIONS, configurationsMap.values());
+ MAP_VALUE.put(KerberosServiceDescriptor.KEY_AUTH_TO_LOCAL_PROPERTIES, authToLocalRules);
}
private static final KerberosServiceDescriptorFactory KERBEROS_SERVICE_DESCRIPTOR_FACTORY = new KerberosServiceDescriptorFactory();
- public static void validateFromJSON(KerberosServiceDescriptor[] serviceDescriptors) {
+ private static void validateFromJSON(KerberosServiceDescriptor[] serviceDescriptors) {
Assert.assertNotNull(serviceDescriptors);
Assert.assertEquals(2, serviceDescriptors.length);
validateFromJSON(serviceDescriptors[0]);
}
- public static void validateFromJSON(KerberosServiceDescriptor serviceDescriptor) {
+ static void validateFromJSON(KerberosServiceDescriptor serviceDescriptor) {
Assert.assertNotNull(serviceDescriptor);
Assert.assertTrue(serviceDescriptor.isContainer());
@@ -190,7 +190,7 @@ public class KerberosServiceDescriptorTest {
Assert.assertEquals("service.name.rules1", authToLocalProperties.iterator().next());
}
- public static void validateFromMap(KerberosServiceDescriptor serviceDescriptor) {
+ static void validateFromMap(KerberosServiceDescriptor serviceDescriptor) {
Assert.assertNotNull(serviceDescriptor);
Assert.assertTrue(serviceDescriptor.isContainer());
@@ -235,7 +235,7 @@ public class KerberosServiceDescriptorTest {
Assert.assertEquals("service.name.rules2", authToLocalProperties.iterator().next());
}
- public void validateUpdatedData(KerberosServiceDescriptor serviceDescriptor) {
+ private void validateUpdatedData(KerberosServiceDescriptor serviceDescriptor) {
Assert.assertNotNull(serviceDescriptor);
Assert.assertEquals("A_DIFFERENT_SERVICE_NAME", serviceDescriptor.getName());
@@ -387,8 +387,6 @@ public class KerberosServiceDescriptorTest {
/**
* Test a JSON object in which only only a Service and configs are defined, but no Components.
- *
- * @throws AmbariException
*/
@Test
public void testJSONWithOnlyServiceNameAndConfigurations() throws AmbariException {
@@ -422,4 +420,4 @@ public class KerberosServiceDescriptorTest {
Assert.assertNotNull(serviceDescriptor);
Assert.assertEquals("A_DIFFERENT_SERVICE_NAME", serviceDescriptor.getName());
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
index 616139c..0eac2be 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/UpgradePackTest.java
@@ -173,6 +173,7 @@ public class UpgradePackTest {
ConfigureTask ct = (ConfigureTask) t;
// check that the Configure task successfully parsed id
assertEquals("hdp_2_1_1_nm_pre_upgrade", ct.getId());
+ assertFalse(ct.supportsPatch);
}
@Test
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
index c3248a3..0daa20f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
@@ -391,7 +391,7 @@ public class ClusterDeployWithStartOnlyTest extends EasyMockSupport {
ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
expectLastCall().once();
- expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
+ expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
persistedTopologyRequest = new PersistedTopologyRequest(1, request);
expect(persistedState.getAllRequests()).andReturn(Collections.emptyMap()).once();
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
index 372d0a1..bbf4fdb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
@@ -368,7 +368,7 @@ public class ClusterInstallWithoutStartOnComponentLevelTest extends EasyMockSupp
ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
expectLastCall().once();
- expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
+ expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
persistedTopologyRequest = new PersistedTopologyRequest(1, request);
expect(persistedState.getAllRequests()).andReturn(Collections.emptyMap()).once();
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
index 9620507..059a8be 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
@@ -363,7 +363,7 @@ public class ClusterInstallWithoutStartTest extends EasyMockSupport {
ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
expectLastCall().once();
- expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(2);
+ expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
persistedTopologyRequest = new PersistedTopologyRequest(1, request);
expect(persistedState.getAllRequests()).andReturn(Collections.emptyMap()).once();
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
index feefcab..b2dac8f 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.topology;
+import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
@@ -30,6 +31,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Executors;
+import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
import org.apache.ambari.server.topology.tasks.ConfigureClusterTask;
import org.easymock.EasyMockRule;
import org.easymock.Mock;
@@ -60,12 +62,18 @@ public class ConfigureClusterTaskTest {
@Mock(type = MockType.STRICT)
private ClusterTopology clusterTopology;
+ @Mock(type = MockType.STRICT)
+ private AmbariContext ambariContext;
+
+ @Mock(type = MockType.NICE)
+ private AmbariEventPublisher ambariEventPublisher;
+
private ConfigureClusterTask testSubject;
@Before
public void before() {
- reset(clusterConfigurationRequest, clusterTopology);
- testSubject = new ConfigureClusterTask(clusterTopology, clusterConfigurationRequest);
+ reset(clusterConfigurationRequest, clusterTopology, ambariContext, ambariEventPublisher);
+ testSubject = new ConfigureClusterTask(clusterTopology, clusterConfigurationRequest, ambariEventPublisher);
}
@Test
@@ -75,11 +83,15 @@ public class ConfigureClusterTaskTest {
// is it OK to handle the non existence of hostgroups as a success?!
expect(clusterConfigurationRequest.getRequiredHostGroups()).andReturn(Collections.emptyList());
expect(clusterTopology.getHostGroupInfo()).andReturn(Collections.emptyMap());
+ expect(clusterTopology.getClusterId()).andReturn(1L).anyTimes();
+ expect(clusterTopology.getAmbariContext()).andReturn(ambariContext);
+ expect(ambariContext.getClusterName(1L)).andReturn("testCluster");
// this is only called if the "prerequisites" are satisfied
clusterConfigurationRequest.process();
+ ambariEventPublisher.publish(anyObject());
- replay(clusterConfigurationRequest, clusterTopology);
+ replay(clusterConfigurationRequest, clusterTopology, ambariContext, ambariEventPublisher);
// WHEN
Boolean result = testSubject.call();
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
index 45c8b1a..ac643d7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
@@ -118,6 +118,7 @@ public class TopologyManagerTest {
@Mock(type = MockType.NICE)
private ProvisionClusterRequest request;
+
private final PersistedTopologyRequest persistedTopologyRequest = new PersistedTopologyRequest(1, request);
@Mock(type = MockType.STRICT)
private LogicalRequestFactory logicalRequestFactory;
@@ -284,7 +285,6 @@ public class TopologyManagerTest {
expect(request.getDescription()).andReturn("Provision Cluster Test").anyTimes();
expect(request.getConfiguration()).andReturn(topoConfiguration).anyTimes();
expect(request.getHostGroupInfo()).andReturn(groupInfoMap).anyTimes();
- expect(request.getRepositoryVersion()).andReturn("1").anyTimes();
expect(request.getConfigRecommendationStrategy()).andReturn(ConfigRecommendationStrategy.NEVER_APPLY).anyTimes();
expect(request.getSecurityConfiguration()).andReturn(null).anyTimes();
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index 2a62f2e..52c3f62 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -22,6 +22,8 @@ import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
@@ -29,7 +31,10 @@ import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertTrue;
+import java.io.File;
+import java.net.URL;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
@@ -37,6 +42,7 @@ import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -45,29 +51,50 @@ import javax.persistence.EntityManager;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.ActionManager;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.controller.KerberosHelper;
+import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
import org.apache.ambari.server.controller.MaintenanceStateHelper;
+import org.apache.ambari.server.controller.ServiceConfigVersionResponse;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.dao.WidgetDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
+import org.apache.ambari.server.orm.entities.WidgetEntity;
+import org.apache.ambari.server.stack.StackManagerFactory;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.commons.io.FileUtils;
import org.easymock.Capture;
+import org.easymock.EasyMock;
import org.easymock.EasyMockRunner;
+import org.easymock.EasyMockSupport;
import org.easymock.Mock;
import org.easymock.MockType;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
+import com.google.common.collect.Maps;
import com.google.gson.Gson;
+import com.google.inject.AbstractModule;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
@@ -105,38 +132,16 @@ public class UpgradeCatalog260Test {
@Mock(type = MockType.NICE)
private OsFamily osFamily;
- @Mock(type = MockType.NICE)
- private KerberosHelper kerberosHelper;
-
- @Mock(type = MockType.NICE)
- private ActionManager actionManager;
-
- @Mock(type = MockType.NICE)
- private Config config;
-
- @Mock(type = MockType.STRICT)
- private Service service;
-
- @Mock(type = MockType.NICE)
- private Clusters clusters;
-
- @Mock(type = MockType.NICE)
- private Cluster cluster;
-
- @Mock(type = MockType.NICE)
- private Injector injector;
+ @Rule
+ public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Before
public void init() {
- reset(entityManagerProvider, injector);
+ reset(entityManagerProvider);
expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
- expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
- expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
- expect(injector.getInstance(KerberosHelper.class)).andReturn(kerberosHelper).anyTimes();
-
- replay(entityManagerProvider, injector);
+ replay(entityManagerProvider);
}
@After
@@ -190,24 +195,18 @@ public class UpgradeCatalog260Test {
expectDropStaleTables();
Capture<DBColumnInfo> repoVersionHiddenColumnCapture = newCapture();
- expectUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture);
+ Capture<DBColumnInfo> repoVersionResolvedColumnCapture = newCapture();
+ expectUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture, repoVersionResolvedColumnCapture);
Capture<DBColumnInfo> unapped = newCapture();
expectRenameServiceDeletedColumn(unapped);
- replay(dbAccessor, configuration, connection, statement, resultSet);
+ expectAddViewUrlPKConstraint();
+ expectRemoveStaleConstraints();
- Module module = new Module() {
- @Override
- public void configure(Binder binder) {
- binder.bind(DBAccessor.class).toInstance(dbAccessor);
- binder.bind(OsFamily.class).toInstance(osFamily);
- binder.bind(EntityManager.class).toInstance(entityManager);
- binder.bind(Configuration.class).toInstance(configuration);
- }
- };
+ replay(dbAccessor, configuration, connection, statement, resultSet);
- Injector injector = Guice.createInjector(module);
+ Injector injector = getInjector();
UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
upgradeCatalog260.executeDDLUpdates();
@@ -219,7 +218,18 @@ public class UpgradeCatalog260Test {
verifyAddSelectedCollumsToClusterconfigTable(selectedColumnInfo, selectedmappingColumnInfo, selectedTimestampColumnInfo, createTimestampColumnInfo);
verifyUpdateUpgradeTable(rvid, orchestration, revertAllowed);
verifyCreateUpgradeHistoryTable(columns);
- verifyUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture);
+ verifyUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture, repoVersionResolvedColumnCapture);
+ }
+
+ private void expectRemoveStaleConstraints() throws SQLException {
+ dbAccessor.dropUniqueConstraint(eq(UpgradeCatalog260.USERS_TABLE), eq(UpgradeCatalog260.STALE_POSTGRESS_USERS_LDAP_USER_KEY));
+ }
+
+ private void expectAddViewUrlPKConstraint() throws SQLException {
+ dbAccessor.dropPKConstraint(eq(UpgradeCatalog260.VIEWURL_TABLE), eq(UpgradeCatalog260.STALE_POSTGRESS_VIEWURL_PKEY));
+ expectLastCall().once();
+ dbAccessor.addPKConstraint(eq(UpgradeCatalog260.VIEWURL_TABLE), eq(UpgradeCatalog260.PK_VIEWURL), eq(UpgradeCatalog260.URL_ID_COLUMN));
+ expectLastCall().once();
}
public void expectDropStaleTables() throws SQLException {
@@ -231,7 +241,7 @@ public class UpgradeCatalog260Test {
expectLastCall().once();
}
- public void expectRenameServiceDeletedColumn(Capture<DBColumnInfo> unmapped) throws SQLException {
+ public void expectRenameServiceDeletedColumn(Capture<DBColumnInfo> unmapped) throws SQLException {
dbAccessor.renameColumn(eq(UpgradeCatalog260.CLUSTER_CONFIG_TABLE), eq(UpgradeCatalog260.SERVICE_DELETED_COLUMN), capture(unmapped));
expectLastCall().once();
}
@@ -301,7 +311,7 @@ public class UpgradeCatalog260Test {
}
public void verifyUpdateUpgradeTable(Capture<DBColumnInfo> rvid,
- Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed) {
+ Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed) {
DBColumnInfo rvidValue = rvid.getValue();
Assert.assertEquals(UpgradeCatalog260.REPO_VERSION_ID_COLUMN, rvidValue.getName());
Assert.assertEquals(Long.class, rvidValue.getType());
@@ -325,7 +335,7 @@ public class UpgradeCatalog260Test {
}
public void expectUpdateUpgradeTable(Capture<DBColumnInfo> rvid,
- Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed)
+ Capture<DBColumnInfo> orchestration, Capture<DBColumnInfo> revertAllowed)
throws SQLException {
dbAccessor.clearTable(eq(UpgradeCatalog260.UPGRADE_TABLE));
@@ -471,8 +481,8 @@ public class UpgradeCatalog260Test {
}
public void verifyGetCurrentVersionID(Capture<String[]> scdcaptureKey, Capture<String[]> scdcaptureValue) {
- Assert.assertTrue(Arrays.equals(scdcaptureKey.getValue(), new String[]{UpgradeCatalog260.STATE_COLUMN}));
- Assert.assertTrue(Arrays.equals(scdcaptureValue.getValue(), new String[]{UpgradeCatalog260.CURRENT}));
+ assertTrue(Arrays.equals(scdcaptureKey.getValue(), new String[]{UpgradeCatalog260.STATE_COLUMN}));
+ assertTrue(Arrays.equals(scdcaptureValue.getValue(), new String[]{UpgradeCatalog260.CURRENT}));
}
public void expectUpdateServiceComponentDesiredStateTable(Capture<DBColumnInfo> scdstadd1, Capture<DBColumnInfo> scdstalter1, Capture<DBColumnInfo> scdstadd2, Capture<DBColumnInfo> scdstalter2) throws SQLException {
@@ -527,19 +537,9 @@ public class UpgradeCatalog260Test {
expectLastCall().once();
replay(dbAccessor, configuration, connection, statement, resultSet);
- Module module = new Module() {
- @Override
- public void configure(Binder binder) {
- binder.bind(DBAccessor.class).toInstance(dbAccessor);
- binder.bind(OsFamily.class).toInstance(osFamily);
- binder.bind(EntityManager.class).toInstance(entityManager);
- binder.bind(Configuration.class).toInstance(configuration);
- }
- };
-
- Injector injector = Guice.createInjector(module);
+ Injector injector = getInjector();
UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
- upgradeCatalog260.removeSupersetFromDruid();
+ upgradeCatalog260.executePreDMLUpdates();
verify(dbAccessor);
@@ -552,42 +552,40 @@ public class UpgradeCatalog260Test {
* @param hiddenColumnCapture
* @throws SQLException
*/
- public void expectUpdateRepositoryVersionTableTable(Capture<DBColumnInfo> hiddenColumnCapture) throws SQLException {
+ public void expectUpdateRepositoryVersionTableTable(Capture<DBColumnInfo> hiddenColumnCapture,
+ Capture<DBColumnInfo> repoVersionResolvedColumnCapture) throws SQLException {
dbAccessor.addColumn(eq(UpgradeCatalog260.REPO_VERSION_TABLE), capture(hiddenColumnCapture));
+ dbAccessor.addColumn(eq(UpgradeCatalog260.REPO_VERSION_TABLE), capture(repoVersionResolvedColumnCapture));
expectLastCall().once();
}
- public void verifyUpdateRepositoryVersionTableTable(Capture<DBColumnInfo> hiddenColumnCapture) {
+ public void verifyUpdateRepositoryVersionTableTable(Capture<DBColumnInfo> hiddenColumnCapture,
+ Capture<DBColumnInfo> resolvedColumnCapture) {
DBColumnInfo hiddenColumn = hiddenColumnCapture.getValue();
Assert.assertEquals(0, hiddenColumn.getDefaultValue());
Assert.assertEquals(UpgradeCatalog260.REPO_VERSION_HIDDEN_COLUMN, hiddenColumn.getName());
Assert.assertEquals(false, hiddenColumn.isNullable());
+
+ DBColumnInfo resolvedColumn = resolvedColumnCapture.getValue();
+ Assert.assertEquals(0, resolvedColumn.getDefaultValue());
+ Assert.assertEquals(UpgradeCatalog260.REPO_VERSION_RESOLVED_COLUMN, resolvedColumn.getName());
+ Assert.assertEquals(false, resolvedColumn.isNullable());
}
@Test
public void testEnsureZeppelinProxyUserConfigs() throws AmbariException {
- final Clusters clusters = createMock(Clusters.class);
+ Injector injector = getInjector();
+
+ final Clusters clusters = injector.getInstance(Clusters.class);
final Cluster cluster = createMock(Cluster.class);
final Config zeppelinEnvConf = createMock(Config.class);
final Config coreSiteConf = createMock(Config.class);
final Config coreSiteConfNew = createMock(Config.class);
- final AmbariManagementController controller = createMock(AmbariManagementController.class);
+ final AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
Capture<? extends Map<String, String>> captureCoreSiteConfProperties = newCapture();
- Module module = new Module() {
- @Override
- public void configure(Binder binder) {
- binder.bind(DBAccessor.class).toInstance(dbAccessor);
- binder.bind(OsFamily.class).toInstance(osFamily);
- binder.bind(EntityManager.class).toInstance(entityManager);
- binder.bind(Configuration.class).toInstance(configuration);
- binder.bind(Clusters.class).toInstance(clusters);
- binder.bind(AmbariManagementController.class).toInstance(controller);
- }
- };
-
expect(clusters.getClusters()).andReturn(Collections.singletonMap("c1", cluster)).once();
expect(cluster.getClusterName()).andReturn("c1").atLeastOnce();
@@ -610,14 +608,264 @@ public class UpgradeCatalog260Test {
replay(clusters, cluster, zeppelinEnvConf, coreSiteConf, coreSiteConfNew, controller);
- Injector injector = Guice.createInjector(module);
UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
upgradeCatalog260.ensureZeppelinProxyUserConfigs();
verify(clusters, cluster, zeppelinEnvConf, coreSiteConf, coreSiteConfNew, controller);
- Assert.assertTrue(captureCoreSiteConfProperties.hasCaptured());
+ assertTrue(captureCoreSiteConfProperties.hasCaptured());
Assert.assertEquals("existing_value", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.hosts"));
Assert.assertEquals("*", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.groups"));
}
+
+ @Test
+ public void testUpdateKerberosDescriptorArtifact() throws Exception {
+
+ Injector injector = getInjector();
+
+ URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_kerberos_descriptor_ranger_kms.json");
+ Assert.assertNotNull(systemResourceURL);
+
+ final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(new File(systemResourceURL.getFile()));
+ Assert.assertNotNull(kerberosDescriptor);
+
+ KerberosServiceDescriptor serviceDescriptor;
+ serviceDescriptor = kerberosDescriptor.getService("RANGER_KMS");
+ Assert.assertNotNull(serviceDescriptor);
+ Assert.assertNotNull(serviceDescriptor.getIdentity("/smokeuser"));
+ Assert.assertNotNull(serviceDescriptor.getIdentity("/spnego"));
+
+ KerberosComponentDescriptor componentDescriptor;
+ componentDescriptor = serviceDescriptor.getComponent("RANGER_KMS_SERVER");
+ Assert.assertNotNull(componentDescriptor);
+ Assert.assertNotNull(componentDescriptor.getIdentity("/smokeuser"));
+ Assert.assertNotNull(componentDescriptor.getIdentity("/spnego"));
+ Assert.assertNotNull(componentDescriptor.getIdentity("/spnego").getPrincipalDescriptor());
+ Assert.assertEquals("invalid_name@${realm}", componentDescriptor.getIdentity("/spnego").getPrincipalDescriptor().getValue());
+
+ ArtifactEntity artifactEntity = createMock(ArtifactEntity.class);
+
+ expect(artifactEntity.getArtifactData()).andReturn(kerberosDescriptor.toMap()).once();
+
+ Capture<Map<String, Object>> captureMap = newCapture();
+ expect(artifactEntity.getForeignKeys()).andReturn(Collections.singletonMap("cluster", "2"));
+ artifactEntity.setArtifactData(capture(captureMap));
+ expectLastCall().once();
+
+ ArtifactDAO artifactDAO = createMock(ArtifactDAO.class);
+ expect(artifactDAO.merge(artifactEntity)).andReturn(artifactEntity).atLeastOnce();
+
+ Map<String, String> properties = new HashMap<>();
+ properties.put("ranger.ks.kerberos.principal", "correct_value@EXAMPLE.COM");
+ properties.put("xasecure.audit.jaas.Client.option.principal", "wrong_value@EXAMPLE.COM");
+
+ Config config = createMock(Config.class);
+ expect(config.getProperties()).andReturn(properties).anyTimes();
+ expect(config.getPropertiesAttributes()).andReturn(Collections.<String, Map<String, String>>emptyMap()).anyTimes();
+ expect(config.getTag()).andReturn("version1").anyTimes();
+ expect(config.getType()).andReturn("ranger-kms-audit").anyTimes();
+
+ Config newConfig = createMock(Config.class);
+ expect(newConfig.getTag()).andReturn("version2").anyTimes();
+ expect(newConfig.getType()).andReturn("ranger-kms-audit").anyTimes();
+
+ ServiceConfigVersionResponse response = createMock(ServiceConfigVersionResponse.class);
+
+ StackId stackId = createMock(StackId.class);
+
+ Cluster cluster = createMock(Cluster.class);
+ expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
+ expect(cluster.getDesiredConfigByType("dbks-site")).andReturn(config).anyTimes();
+ expect(cluster.getDesiredConfigByType("ranger-kms-audit")).andReturn(config).anyTimes();
+ expect(cluster.getConfigsByType("ranger-kms-audit")).andReturn(Collections.singletonMap("version1", config)).anyTimes();
+ expect(cluster.getServiceByConfigType("ranger-kms-audit")).andReturn("RANGER").anyTimes();
+ expect(cluster.getClusterName()).andReturn("cl1").anyTimes();
+ expect(cluster.getConfig(eq("ranger-kms-audit"), anyString())).andReturn(newConfig).once();
+ expect(cluster.addDesiredConfig("ambari-upgrade", Collections.singleton(newConfig), "Updated ranger-kms-audit during Ambari Upgrade from 2.5.2 to 2.6.0.")).andReturn(response).once();
+
+ final Clusters clusters = injector.getInstance(Clusters.class);
+ expect(clusters.getCluster(2L)).andReturn(cluster).anyTimes();
+
+ Capture<? extends Map<String, String>> captureProperties = newCapture();
+
+ AmbariManagementController controller = injector.getInstance(AmbariManagementController.class);
+ expect(controller.createConfig(eq(cluster), eq(stackId), eq("ranger-kms-audit"), capture(captureProperties), anyString(), anyObject(Map.class)))
+ .andReturn(null)
+ .once();
+
+ replay(artifactDAO, artifactEntity, cluster, clusters, config, newConfig, response, controller, stackId);
+
+ UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
+ upgradeCatalog260.updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
+ verify(artifactDAO, artifactEntity, cluster, clusters, config, newConfig, response, controller, stackId);
+
+ KerberosDescriptor kerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(captureMap.getValue());
+ Assert.assertNotNull(kerberosDescriptorUpdated);
+
+ Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getIdentity("/smokeuser"));
+ Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getComponent("RANGER_KMS_SERVER").getIdentity("/smokeuser"));
+
+ KerberosIdentityDescriptor identity;
+
+ Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getIdentity("/spnego"));
+ identity = kerberosDescriptorUpdated.getService("RANGER_KMS").getIdentity("ranger_kms_spnego");
+ Assert.assertNotNull(identity);
+ Assert.assertEquals("/spnego", identity.getReference());
+
+ Assert.assertNull(kerberosDescriptorUpdated.getService("RANGER_KMS").getComponent("RANGER_KMS_SERVER").getIdentity("/spnego"));
+ identity = kerberosDescriptorUpdated.getService("RANGER_KMS").getComponent("RANGER_KMS_SERVER").getIdentity("ranger_kms_ranger_kms_server_spnego");
+ Assert.assertNotNull(identity);
+ Assert.assertEquals("/spnego", identity.getReference());
+ Assert.assertNotNull(identity.getPrincipalDescriptor());
+ Assert.assertNull(identity.getPrincipalDescriptor().getValue());
+
+ Assert.assertTrue(captureProperties.hasCaptured());
+ Map<String, String> newProperties = captureProperties.getValue();
+ Assert.assertEquals("correct_value@EXAMPLE.COM", newProperties.get("xasecure.audit.jaas.Client.option.principal"));
+ }
+
+ @Test
+ public void testUpdateAmsConfigs() throws Exception {
+
+ // Existing ams-ssl-client config: the upgrade is expected to drop the
+ // truststore alias property while keeping the truststore location.
+ Map<String, String> oldProperties = new HashMap<String, String>() {
+ {
+ put("ssl.client.truststore.location", "/some/location");
+ put("ssl.client.truststore.alias", "test_alias");
+ }
+ };
+ // Expected config contents after UpgradeCatalog260#updateAmsConfigs() runs.
+ Map<String, String> newProperties = new HashMap<String, String>() {
+ {
+ put("ssl.client.truststore.location", "/some/location");
+ }
+ };
+
+ EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+ Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+ final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+ Config mockAmsSslClient = easyMockSupport.createNiceMock(Config.class);
+
+ // Single cluster "normal" whose desired ams-ssl-client config returns the
+ // old properties defined above.
+ expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+ put("normal", cluster);
+ }}).once();
+ expect(cluster.getDesiredConfigByType("ams-ssl-client")).andReturn(mockAmsSslClient).atLeastOnce();
+ expect(mockAmsSslClient.getProperties()).andReturn(oldProperties).anyTimes();
+
+ Injector injector = easyMockSupport.createNiceMock(Injector.class);
+ expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+ expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+
+ replay(injector, clusters, mockAmsSslClient, cluster);
+
+ // Partial mock of the controller: only the config-related methods are
+ // stubbed; everything else behaves as a nice mock.
+ AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+ .addMockedMethod("createConfiguration")
+ .addMockedMethod("getClusters", new Class[] { })
+ .addMockedMethod("createConfig")
+ .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+ .createNiceMock();
+
+ Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+ // Captures the property map handed to createConfig() so the result of the
+ // upgrade can be asserted below.
+ Capture<Map> propertiesCapture = EasyMock.newCapture();
+
+ expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+ expect(controller.getClusters()).andReturn(clusters).anyTimes();
+ expect(controller.createConfig(anyObject(Cluster.class), anyObject(StackId.class), anyString(), capture(propertiesCapture), anyString(),
+ anyObject(Map.class))).andReturn(createNiceMock(Config.class)).once();
+
+ replay(controller, injector2);
+ // Exercise the upgrade step and make sure every expectation was met.
+ new UpgradeCatalog260(injector2).updateAmsConfigs();
+ easyMockSupport.verifyAll();
+
+ // The new config must equal newProperties exactly (alias removed).
+ Map<String, String> updatedProperties = propertiesCapture.getValue();
+ assertTrue(Maps.difference(newProperties, updatedProperties).areEqual());
+ }
+
+ @Test
+ public void testHDFSWidgetUpdate() throws Exception {
+ final Clusters clusters = createNiceMock(Clusters.class);
+ final Cluster cluster = createNiceMock(Cluster.class);
+ final AmbariManagementController controller = createNiceMock(AmbariManagementController.class);
+ final Gson gson = new Gson();
+ final WidgetDAO widgetDAO = createNiceMock(WidgetDAO.class);
+ final AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class);
+ WidgetEntity widgetEntity = createNiceMock(WidgetEntity.class);
+ StackId stackId = new StackId("HDP", "2.0.0");
+ StackInfo stackInfo = createNiceMock(StackInfo.class);
+ ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
+ Service service = createNiceMock(Service.class);
+
+ // Minimal widgets descriptor with a single heatmap widget; written to a
+ // temp file below and served via serviceInfo.getWidgetsDescriptorFile().
+ String widgetStr = "{\n" +
+ " \"layouts\": [\n" +
+ " {\n" +
+ " \"layout_name\": \"default_hdfs_heatmap\",\n" +
+ " \"display_name\": \"Standard HDFS HeatMaps\",\n" +
+ " \"section_name\": \"HDFS_HEATMAPS\",\n" +
+ " \"widgetLayoutInfo\": [\n" +
+ " {\n" +
+ " \"widget_name\": \"HDFS Bytes Read\",\n" +
+ " \"metrics\": [],\n" +
+ " \"values\": []\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ "}";
+
+ File dataDirectory = temporaryFolder.newFolder();
+ File file = new File(dataDirectory, "hdfs_widget.json");
+ FileUtils.writeStringToFile(file, widgetStr);
+
+ // Real Guice injector backed by the mocks above, so UpgradeCatalog260 gets
+ // its full dependency graph without touching a database.
+ final Injector mockInjector = Guice.createInjector(new AbstractModule() {
+ @Override
+ protected void configure() {
+ bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+ bind(AmbariManagementController.class).toInstance(controller);
+ bind(Clusters.class).toInstance(clusters);
+ bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+ bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+ bind(Gson.class).toInstance(gson);
+ bind(WidgetDAO.class).toInstance(widgetDAO);
+ bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
+ bind(AmbariMetaInfo.class).toInstance(metaInfo);
+ }
+ });
+ // One cluster running HDFS on HDP-2.0.0; the stack resolves to the widget
+ // descriptor file created above.
+ expect(controller.getClusters()).andReturn(clusters).anyTimes();
+ expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+ put("normal", cluster);
+ }}).anyTimes();
+ expect(cluster.getServices()).andReturn(Collections.singletonMap("HDFS", service)).anyTimes();
+ expect(cluster.getClusterId()).andReturn(1L).anyTimes();
+ expect(service.getDesiredStackId()).andReturn(stackId).anyTimes();
+ expect(stackInfo.getService("HDFS")).andReturn(serviceInfo);
+ expect(cluster.getDesiredStackVersion()).andReturn(stackId).anyTimes();
+ expect(metaInfo.getStack("HDP", "2.0.0")).andReturn(stackInfo).anyTimes();
+ expect(serviceInfo.getWidgetsDescriptorFile()).andReturn(file).anyTimes();
+
+ // The existing widget must be looked up by name/author/section and merged
+ // (i.e. updated in place) rather than re-created.
+ expect(widgetDAO.findByName(1L, "HDFS Bytes Read", "ambari", "HDFS_HEATMAPS"))
+ .andReturn(Collections.singletonList(widgetEntity));
+ expect(widgetDAO.merge(widgetEntity)).andReturn(null);
+ expect(widgetEntity.getWidgetName()).andReturn("HDFS Bytes Read").anyTimes();
+
+ replay(clusters, cluster, controller, widgetDAO, metaInfo, widgetEntity, stackInfo, serviceInfo, service);
+
+ mockInjector.getInstance(UpgradeCatalog260.class).updateHDFSWidgetDefinition();
+
+ // NOTE(review): metaInfo and service are replayed above but not verified
+ // here — confirm the omission is intentional.
+ verify(clusters, cluster, controller, widgetDAO, widgetEntity, stackInfo, serviceInfo);
+ }
+
+ private Injector getInjector() {
+
+ return Guice.createInjector(new Module() {
+ @Override
+ public void configure(Binder binder) {
+ binder.bind(DBAccessor.class).toInstance(dbAccessor);
+ binder.bind(OsFamily.class).toInstance(osFamily);
+ binder.bind(EntityManager.class).toInstance(entityManager);
+ binder.bind(Configuration.class).toInstance(configuration);
+ binder.bind(Clusters.class).toInstance(createMock(Clusters.class));
+ binder.bind(AmbariManagementController.class).toInstance(createMock(AmbariManagementController.class));
+ }
+ });
+ }
+
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/5ae98dbe/ambari-server/src/test/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutorTest.java
new file mode 100644
index 0000000..a04c38b
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/utils/ManagedThreadPoolExecutorTest.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.utils;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+import org.junit.Test;
+
+import junit.framework.Assert;
+
+public class ManagedThreadPoolExecutorTest {
+
+  /**
+   * Verifies that a {@link ManagedThreadPoolExecutor} holds submitted tasks
+   * until start() is called, reports isRunning() accordingly, and stops
+   * reporting running after stop().
+   *
+   * Fixes from review: the method was misnamed "testGetHostAndPortFromProperty"
+   * (copy-paste from an unrelated test), the local "feature" was a typo for
+   * "future", and the submitted task's result was never checked — the test
+   * previously passed even if the executor never ran the task.
+   */
+  @Test
+  public void testTasksRunOnlyAfterStart() throws Exception {
+
+    ManagedThreadPoolExecutor topologyTaskExecutor = new ManagedThreadPoolExecutor(1,
+        1, 0L, TimeUnit.MILLISECONDS,
+        new LinkedBlockingQueue<Runnable>());
+    // Submitted before start(): the task must stay queued for now.
+    Future<Boolean> future = topologyTaskExecutor.submit(new Callable<Boolean>() {
+      @Override
+      public Boolean call() {
+        return Boolean.TRUE;
+      }
+    });
+
+    Assert.assertFalse(topologyTaskExecutor.isRunning());
+    topologyTaskExecutor.start();
+    Assert.assertTrue(topologyTaskExecutor.isRunning());
+    // Once started, the queued task must actually execute and yield its result.
+    Assert.assertEquals(Boolean.TRUE, future.get(10, TimeUnit.SECONDS));
+    topologyTaskExecutor.stop();
+    Assert.assertFalse(topologyTaskExecutor.isRunning());
+  }
+}