You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by mr...@apache.org on 2017/11/27 23:29:15 UTC
[03/30] ambari git commit: Merge trunk with feature branch and fix
some UT compilation issues (mradhakrishnan)
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
index 04d03be..f94adbe 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/kerberos/MITKerberosOperationHandlerTest.java
@@ -19,12 +19,9 @@
package org.apache.ambari.server.serveraction.kerberos;
import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.newCapture;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.verify;
import java.lang.reflect.Method;
import java.util.HashMap;
@@ -36,11 +33,8 @@ import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.stack.OsFamily;
import org.apache.ambari.server.utils.ShellCommandUtil;
import org.easymock.Capture;
-import org.easymock.EasyMock;
-import org.easymock.IAnswer;
-import org.easymock.IMockBuilder;
+import org.junit.Before;
import org.junit.BeforeClass;
-import org.junit.Ignore;
import org.junit.Test;
import com.google.inject.AbstractModule;
@@ -49,15 +43,7 @@ import com.google.inject.Injector;
import junit.framework.Assert;
-public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTest {
-
- private static final String DEFAULT_ADMIN_PRINCIPAL = "admin/admin";
- private static final String DEFAULT_ADMIN_PASSWORD = "hadoop";
- private static final String DEFAULT_REALM = "EXAMPLE.COM";
-
- private static Injector injector;
-
- private static Method methodExecuteCommand;
+public class MITKerberosOperationHandlerTest extends KDCKerberosOperationHandlerTest {
private static final Map<String, String> KERBEROS_ENV_MAP = new HashMap<String, String>() {
{
@@ -69,105 +55,75 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
}
};
+ private static Method methodIsOpen;
+
+ private static Method methodPrincipalExists;
+
+ private static Method methodInvokeKAdmin;
+
+ private Injector injector;
+
@BeforeClass
- public static void beforeClass() throws Exception {
+ public static void beforeClassMITKerberosOperationHandlerTestC() throws Exception {
+ methodIsOpen = KerberosOperationHandler.class.getDeclaredMethod("isOpen");
+ methodPrincipalExists = MITKerberosOperationHandler.class.getDeclaredMethod("principalExists", String.class, boolean.class);
+ methodInvokeKAdmin = MITKerberosOperationHandler.class.getDeclaredMethod("invokeKAdmin", String.class);
+ }
+
+ @Before
+ public void beforeMITKerberosOperationHandlerTest() throws Exception {
injector = Guice.createInjector(new AbstractModule() {
@Override
protected void configure() {
- Configuration configuration = EasyMock.createNiceMock(Configuration.class);
+ Configuration configuration = createNiceMock(Configuration.class);
expect(configuration.getServerOsFamily()).andReturn("redhat6").anyTimes();
expect(configuration.getKerberosOperationRetryTimeout()).andReturn(1).anyTimes();
- replay(configuration);
- bind(Clusters.class).toInstance(EasyMock.createNiceMock(Clusters.class));
+ bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
bind(Configuration.class).toInstance(configuration);
- bind(OsFamily.class).toInstance(EasyMock.createNiceMock(OsFamily.class));
+ bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
}
});
-
- methodExecuteCommand = KerberosOperationHandler.class.getDeclaredMethod(
- "executeCommand",
- String[].class,
- Map.class,
- ShellCommandUtil.InteractiveHandler.class);
}
+
@Test
- public void testSetPrincipalPasswordExceptions() throws Exception {
- MITKerberosOperationHandler handler = injector.getInstance(MITKerberosOperationHandler.class);
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
+ public void testSetPrincipalPassword() throws Exception {
+ MITKerberosOperationHandler handler = createMockedHandler(methodIsOpen, methodPrincipalExists);
- try {
- handler.setPrincipalPassword(DEFAULT_ADMIN_PRINCIPAL, null);
- Assert.fail("KerberosOperationException not thrown for null password");
- } catch (Throwable t) {
- Assert.assertEquals(KerberosOperationException.class, t.getClass());
- }
+ expect(handler.isOpen()).andReturn(true).atLeastOnce();
+ expect(handler.principalExists(DEFAULT_ADMIN_PRINCIPAL, false)).andReturn(true).atLeastOnce();
+ expect(handler.principalExists(null, false)).andReturn(false).atLeastOnce();
+ expect(handler.principalExists("", false)).andReturn(false).atLeastOnce();
- try {
- handler.setPrincipalPassword(DEFAULT_ADMIN_PRINCIPAL, "");
- Assert.fail("KerberosOperationException not thrown for empty password");
- handler.createPrincipal("", "1234", false);
- Assert.fail("AmbariException not thrown for empty principal");
- } catch (Throwable t) {
- Assert.assertEquals(KerberosOperationException.class, t.getClass());
- }
+ replayAll();
+
+ Integer expected = 0;
+
+ // setPrincipalPassword should always return 0
+ Assert.assertEquals(expected, handler.setPrincipalPassword(DEFAULT_ADMIN_PRINCIPAL, null, false));
+ Assert.assertEquals(expected, handler.setPrincipalPassword(DEFAULT_ADMIN_PRINCIPAL, "", false));
try {
- handler.setPrincipalPassword(null, DEFAULT_ADMIN_PASSWORD);
- Assert.fail("KerberosOperationException not thrown for null principal");
- } catch (Throwable t) {
- Assert.assertEquals(KerberosOperationException.class, t.getClass());
+ handler.setPrincipalPassword(null, DEFAULT_ADMIN_PASSWORD, false);
+ Assert.fail("Expected KerberosPrincipalDoesNotExistException");
+ } catch (KerberosPrincipalDoesNotExistException e) {
+ // Expected...
}
try {
- handler.setPrincipalPassword("", DEFAULT_ADMIN_PASSWORD);
- Assert.fail("KerberosOperationException not thrown for empty principal");
- } catch (Throwable t) {
- Assert.assertEquals(KerberosOperationException.class, t.getClass());
+ handler.setPrincipalPassword("", DEFAULT_ADMIN_PASSWORD, false);
+ Assert.fail("Expected KerberosPrincipalDoesNotExistException");
+ } catch (KerberosPrincipalDoesNotExistException e) {
+ // Expected...
}
- }
-
- @Test(expected = KerberosPrincipalDoesNotExistException.class)
- public void testSetPrincipalPasswordPrincipalDoesNotExist() throws Exception {
- MITKerberosOperationHandler handler = createMockBuilder(MITKerberosOperationHandler.class)
- .addMockedMethod(methodExecuteCommand)
- .createNiceMock();
- injector.injectMembers(handler);
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(0).anyTimes();
- expect(result.isSuccessful()).andReturn(true).anyTimes();
- expect(result.getStderr())
- .andReturn("change_password: Principal does not exist while changing password for \"nonexistant@EXAMPLE.COM\".")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
-
- replayAll();
-
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
- handler.setPrincipalPassword("nonexistant@EXAMPLE.COM", "password");
- handler.close();
+ verifyAll();
}
@Test
public void testCreateServicePrincipal_AdditionalAttributes() throws Exception {
- Method invokeKAdmin = MITKerberosOperationHandler.class.getDeclaredMethod("invokeKAdmin", String.class, String.class);
-
Capture<? extends String> query = newCapture();
- Capture<? extends String> password = newCapture();
ShellCommandUtil.Result result1 = createNiceMock(ShellCommandUtil.Result.class);
expect(result1.getStderr()).andReturn("").anyTimes();
@@ -177,72 +133,40 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
expect(result2.getStderr()).andReturn("").anyTimes();
expect(result2.getStdout()).andReturn("Key: vno 1").anyTimes();
- MITKerberosOperationHandler handler = createMockBuilder(MITKerberosOperationHandler.class)
- .addMockedMethod(invokeKAdmin)
- .createStrictMock();
+ ShellCommandUtil.Result kinitResult = createMock(ShellCommandUtil.Result.class);
+ expect(kinitResult.isSuccessful()).andReturn(true);
- expect(handler.invokeKAdmin(capture(query), anyString())).andReturn(result1).once();
- expect(handler.invokeKAdmin("get_principal " + DEFAULT_ADMIN_PRINCIPAL, null)).andReturn(result2).once();
+ MITKerberosOperationHandler handler = createMockedHandler(methodInvokeKAdmin, methodExecuteCommand);
+ expect(handler.executeCommand(anyObject(String[].class), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(kinitResult)
+ .once();
+ expect(handler.invokeKAdmin(capture(query))).andReturn(result1).once();
- replay(handler, result1, result2);
+ replayAll();
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
+ handler.open(getAdminCredentials(), DEFAULT_REALM, KERBEROS_ENV_MAP);
handler.createPrincipal(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD, false);
+ handler.close();
- verify(handler, result1, result2);
+ verifyAll();
Assert.assertTrue(query.getValue().contains(" " + KERBEROS_ENV_MAP.get(MITKerberosOperationHandler.KERBEROS_ENV_KDC_CREATE_ATTRIBUTES) + " "));
}
- @Test(expected = KerberosPrincipalAlreadyExistsException.class)
- public void testCreatePrincipalPrincipalAlreadyNotExists() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(0).anyTimes();
- expect(result.isSuccessful()).andReturn(true).anyTimes();
- expect(result.getStderr())
- .andReturn("add_principal: Principal or policy already exists while creating \"existing@EXAMPLE.COM\".")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
-
- replayAll();
-
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
- handler.createPrincipal("existing@EXAMPLE.COM", "password", false);
- handler.close();
- }
@Test
- public void testCreateServicePrincipal_Exceptions() throws Exception {
- MITKerberosOperationHandler handler = new MITKerberosOperationHandler();
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
+ public void testCreateServicePrincipalExceptions() throws Exception {
+ ShellCommandUtil.Result kinitResult = createMock(ShellCommandUtil.Result.class);
+ expect(kinitResult.isSuccessful()).andReturn(true);
- try {
- handler.createPrincipal(DEFAULT_ADMIN_PRINCIPAL, null, false);
- Assert.fail("KerberosOperationException not thrown for null password");
- } catch (Throwable t) {
- Assert.assertEquals(KerberosOperationException.class, t.getClass());
- }
+ MITKerberosOperationHandler handler = createMockedHandler(methodExecuteCommand);
+ expect(handler.executeCommand(anyObject(String[].class), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(kinitResult)
+ .once();
- try {
- handler.createPrincipal(DEFAULT_ADMIN_PRINCIPAL, "", false);
- Assert.fail("KerberosOperationException not thrown for empty password");
- } catch (Throwable t) {
- Assert.assertEquals(KerberosOperationException.class, t.getClass());
- }
+ replayAll();
+
+ handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
try {
handler.createPrincipal(null, DEFAULT_ADMIN_PASSWORD, false);
@@ -257,339 +181,156 @@ public class MITKerberosOperationHandlerTest extends KerberosOperationHandlerTes
} catch (Throwable t) {
Assert.assertEquals(KerberosOperationException.class, t.getClass());
}
- }
- @Test(expected = KerberosAdminAuthenticationException.class)
- public void testTestAdministratorCredentialsIncorrectAdminPassword() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(1).anyTimes();
- expect(result.isSuccessful()).andReturn(false).anyTimes();
- expect(result.getStderr())
- .andReturn("kadmin: Incorrect password while initializing kadmin interface")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
-
- replayAll();
-
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
- handler.testAdministratorCredentials();
- handler.close();
- }
-
- @Test(expected = KerberosAdminAuthenticationException.class)
- public void testTestAdministratorCredentialsIncorrectAdminPrincipal() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(1).anyTimes();
- expect(result.isSuccessful()).andReturn(false).anyTimes();
- expect(result.getStderr())
- .andReturn("kadmin: Client not found in Kerberos database while initializing kadmin interface")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
-
- replayAll();
-
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
- handler.testAdministratorCredentials();
- handler.close();
- }
-
- @Test(expected = KerberosRealmException.class)
- public void testTestAdministratorCredentialsInvalidRealm() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(1).anyTimes();
- expect(result.isSuccessful()).andReturn(false).anyTimes();
- expect(result.getStderr())
- .andReturn("kadmin: Missing parameters in krb5.conf required for kadmin client while initializing kadmin interface")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
-
- replayAll();
-
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
- handler.testAdministratorCredentials();
- handler.close();
- }
-
- @Test(expected = KerberosRealmException.class)
- public void testTestAdministratorCredentialsInvalidRealm2() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(1).anyTimes();
- expect(result.isSuccessful()).andReturn(false).anyTimes();
- expect(result.getStderr())
- .andReturn("kadmin: Cannot find KDC for requested realm while initializing kadmin interface")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
-
- replayAll();
-
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
- handler.testAdministratorCredentials();
- handler.close();
+ verifyAll();
}
@Test(expected = KerberosKDCConnectionException.class)
- public void testTestAdministratorCredentialsKDCConnectionException() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(1).anyTimes();
- expect(result.isSuccessful()).andReturn(false).anyTimes();
- expect(result.getStderr())
- .andReturn("kadmin: Cannot contact any KDC for requested realm while initializing kadmin interface")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
+ public void testKDCConnectionException() throws Exception {
+ ShellCommandUtil.Result kinitResult = createMock(ShellCommandUtil.Result.class);
+ expect(kinitResult.isSuccessful()).andReturn(true).anyTimes();
+
+ ShellCommandUtil.Result kadminResult = createMock(ShellCommandUtil.Result.class);
+ expect(kadminResult.getExitCode()).andReturn(1).anyTimes();
+ expect(kadminResult.isSuccessful()).andReturn(false).anyTimes();
+ expect(kadminResult.getStderr())
+ .andReturn("kadmin: Cannot contact any KDC for requested realm while initializing kadmin interface")
+ .anyTimes();
+ expect(kadminResult.getStdout())
+ .andReturn("Authenticating as principal admin/admin with password.")
+ .anyTimes();
+
+ MITKerberosOperationHandler handler = createMockedHandler(methodExecuteCommand);
+ expect(handler.executeCommand(anyObject(String[].class), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(kinitResult)
+ .once();
+ expect(handler.executeCommand(anyObject(String[].class), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(kadminResult)
+ .once();
replayAll();
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
+ handler.open(getAdminCredentials(), DEFAULT_REALM, KERBEROS_ENV_MAP);
handler.testAdministratorCredentials();
handler.close();
+
+ verifyAll();
}
@Test(expected = KerberosKDCConnectionException.class)
public void testTestAdministratorCredentialsKDCConnectionException2() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(1).anyTimes();
- expect(result.isSuccessful()).andReturn(false).anyTimes();
- expect(result.getStderr())
- .andReturn("kadmin: Cannot resolve network address for admin server in requested realm while initializing kadmin interface")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
+ ShellCommandUtil.Result kinitResult = createMock(ShellCommandUtil.Result.class);
+ expect(kinitResult.isSuccessful()).andReturn(true).anyTimes();
+
+ ShellCommandUtil.Result kadminResult = createMock(ShellCommandUtil.Result.class);
+ expect(kadminResult.getExitCode()).andReturn(1).anyTimes();
+ expect(kadminResult.isSuccessful()).andReturn(false).anyTimes();
+ expect(kadminResult.getStderr())
+ .andReturn("kadmin: Cannot resolve network address for admin server in requested realm while initializing kadmin interface")
+ .anyTimes();
+ expect(kadminResult.getStdout())
+ .andReturn("Authenticating as principal admin/admin with password.")
+ .anyTimes();
+
+ MITKerberosOperationHandler handler = createMockedHandler(methodExecuteCommand);
+ expect(handler.executeCommand(anyObject(String[].class), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(kinitResult)
+ .once();
+ expect(handler.executeCommand(anyObject(String[].class), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(kadminResult)
+ .once();
replayAll();
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
+ handler.open(getAdminCredentials(), DEFAULT_REALM, KERBEROS_ENV_MAP);
handler.testAdministratorCredentials();
handler.close();
- }
-
- @Test
- public void testTestAdministratorCredentialsNotFound() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(0).anyTimes();
- expect(result.isSuccessful()).andReturn(true).anyTimes();
- expect(result.getStderr())
- .andReturn("get_principal: Principal does not exist while retrieving \"admin/admi@EXAMPLE.COM\".")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
-
- replayAll();
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
- Assert.assertFalse(handler.testAdministratorCredentials());
- handler.close();
+ verifyAll();
}
- @Test
- public void testTestAdministratorCredentialsSuccess() throws Exception {
- MITKerberosOperationHandler handler = createMock();
-
- expect(handler.executeCommand(anyObject(String[].class), EasyMock.anyObject(), anyObject(MITKerberosOperationHandler.InteractivePasswordHandler.class)))
- .andAnswer(new IAnswer<ShellCommandUtil.Result>() {
- @Override
- public ShellCommandUtil.Result answer() throws Throwable {
- ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
-
- expect(result.getExitCode()).andReturn(0).anyTimes();
- expect(result.isSuccessful()).andReturn(true).anyTimes();
- expect(result.getStderr())
- .andReturn("")
- .anyTimes();
- expect(result.getStdout())
- .andReturn("Authenticating as principal admin/admin with password.\n" +
- "Principal: admin/admin@EXAMPLE.COM\n" +
- "Expiration date: [never]\n" +
- "Last password change: Thu Jan 08 13:09:52 UTC 2015\n" +
- "Password expiration date: [none]\n" +
- "Maximum ticket life: 1 day 00:00:00\n" +
- "Maximum renewable life: 0 days 00:00:00\n" +
- "Last modified: Thu Jan 08 13:09:52 UTC 2015 (root/admin@EXAMPLE.COM)\n" +
- "Last successful authentication: [never]\n" +
- "Last failed authentication: [never]\n" +
- "Failed password attempts: 0\n" +
- "Number of keys: 6\n" +
- "Key: vno 1, aes256-cts-hmac-sha1-96, no salt\n" +
- "Key: vno 1, aes128-cts-hmac-sha1-96, no salt\n" +
- "Key: vno 1, des3-cbc-sha1, no salt\n" +
- "Key: vno 1, arcfour-hmac, no salt\n" +
- "Key: vno 1, des-hmac-sha1, no salt\n" +
- "Key: vno 1, des-cbc-md5, no salt\n" +
- "MKey: vno 1\n" +
- "Attributes:\n" +
- "Policy: [none]")
- .anyTimes();
-
- replay(result);
- return result;
- }
- });
-
- replayAll();
-
- handler.open(new PrincipalKeyCredential(DEFAULT_ADMIN_PRINCIPAL, DEFAULT_ADMIN_PASSWORD), DEFAULT_REALM, KERBEROS_ENV_MAP);
- handler.testAdministratorCredentials();
- handler.close();
+ @Override
+ protected MITKerberosOperationHandler createMockedHandler(Method... mockedMethods) {
+ MITKerberosOperationHandler handler = createMockBuilder(MITKerberosOperationHandler.class)
+ .addMockedMethods(mockedMethods)
+ .createMock();
+ injector.injectMembers(handler);
+ return handler;
}
- @Test
- @Ignore
- public void testTestAdministratorCredentialsLive() throws KerberosOperationException {
- MITKerberosOperationHandler handler = new MITKerberosOperationHandler();
- String principal = System.getProperty("principal");
- String password = System.getProperty("password");
- String realm = System.getProperty("realm");
-
- if (principal == null) {
- principal = DEFAULT_ADMIN_PRINCIPAL;
- }
-
- if (password == null) {
- password = DEFAULT_ADMIN_PASSWORD;
- }
-
- if (realm == null) {
- realm = DEFAULT_REALM;
- }
-
- PrincipalKeyCredential credentials = new PrincipalKeyCredential(principal, password);
-
- handler.open(credentials, realm, KERBEROS_ENV_MAP);
- handler.testAdministratorCredentials();
- handler.close();
+ @Override
+ protected Map<String, String> getKerberosEnv() {
+ return KERBEROS_ENV_MAP;
}
- @Test
- public void testInteractivePasswordHandler() {
- MITKerberosOperationHandler.InteractivePasswordHandler handler = new MITKerberosOperationHandler.InteractivePasswordHandler("admin_password", "user_password");
-
- handler.start();
- Assert.assertEquals("admin_password", handler.getResponse("password"));
- Assert.assertFalse(handler.done());
- Assert.assertEquals("user_password", handler.getResponse("password"));
- Assert.assertFalse(handler.done());
- Assert.assertEquals("user_password", handler.getResponse("password"));
- Assert.assertTrue(handler.done());
-
- // Test restarting
- handler.start();
- Assert.assertEquals("admin_password", handler.getResponse("password"));
- Assert.assertFalse(handler.done());
- Assert.assertEquals("user_password", handler.getResponse("password"));
- Assert.assertFalse(handler.done());
- Assert.assertEquals("user_password", handler.getResponse("password"));
- Assert.assertTrue(handler.done());
+ @Override
+ protected void setupPrincipalAlreadyExists(KerberosOperationHandler handler, boolean service) throws Exception {
+ ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
+ expect(result.getExitCode()).andReturn(0).anyTimes();
+ expect(result.isSuccessful()).andReturn(true).anyTimes();
+ expect(result.getStderr())
+ .andReturn(String.format("add_principal: Principal or policy already exists while creating \"%s@EXAMPLE.COM\".", (service) ? "service/host" : "user"))
+ .anyTimes();
+ expect(result.getStdout())
+ .andReturn("Authenticating as principal admin/admin with password.")
+ .anyTimes();
+
+ expect(handler.executeCommand(arrayContains(new String[]{"kadmin", "add_principal"}), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(result)
+ .anyTimes();
}
- private MITKerberosOperationHandler createMock(){
- return createMock(false);
+ @Override
+ protected void setupPrincipalDoesNotExist(KerberosOperationHandler handler, boolean service) throws Exception {
+ ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
+ expect(result.getExitCode()).andReturn(0).anyTimes();
+ expect(result.isSuccessful()).andReturn(true).anyTimes();
+ expect(result.getStderr())
+ .andReturn(String.format("get_principal: Principal does not exist while retrieving \"%s@EXAMPLE.COM\".", (service) ? "service/host" : "user"))
+ .anyTimes();
+ expect(result.getStdout())
+ .andReturn("Authenticating as principal admin/admin with password.")
+ .anyTimes();
+
+ expect(handler.executeCommand(arrayContains(new String[]{"kadmin", "get_principal"}), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(result)
+ .anyTimes();
}
- private MITKerberosOperationHandler createMock(boolean strict) {
- IMockBuilder<MITKerberosOperationHandler> mockBuilder = createMockBuilder(MITKerberosOperationHandler.class)
- .addMockedMethod(methodExecuteCommand);
- MITKerberosOperationHandler result;
- if(strict){
- result = mockBuilder.createStrictMock();
- } else {
- result = mockBuilder.createNiceMock();
- }
- injector.injectMembers(result);
- return result;
+ @Override
+ protected void setupPrincipalExists(KerberosOperationHandler handler, boolean service) throws Exception {
+ ShellCommandUtil.Result result = createMock(ShellCommandUtil.Result.class);
+ expect(result.getExitCode()).andReturn(0).anyTimes();
+ expect(result.isSuccessful()).andReturn(true).anyTimes();
+ expect(result.getStderr())
+ .andReturn("")
+ .anyTimes();
+ expect(result.getStdout())
+ .andReturn(String.format("Authenticating as principal admin/admin with password.\n" +
+ "Principal: %s@EXAMPLE.COM\n" +
+ "Expiration date: [never]\n" +
+ "Last password change: Thu Jan 08 13:09:52 UTC 2015\n" +
+ "Password expiration date: [none]\n" +
+ "Maximum ticket life: 1 day 00:00:00\n" +
+ "Maximum renewable life: 0 days 00:00:00\n" +
+ "Last modified: Thu Jan 08 13:09:52 UTC 2015 (root/admin@EXAMPLE.COM)\n" +
+ "Last successful authentication: [never]\n" +
+ "Last failed authentication: [never]\n" +
+ "Failed password attempts: 0\n" +
+ "Number of keys: 6\n" +
+ "Key: vno 1, aes256-cts-hmac-sha1-96, no salt\n" +
+ "Key: vno 1, aes128-cts-hmac-sha1-96, no salt\n" +
+ "Key: vno 1, des3-cbc-sha1, no salt\n" +
+ "Key: vno 1, arcfour-hmac, no salt\n" +
+ "Key: vno 1, des-hmac-sha1, no salt\n" +
+ "Key: vno 1, des-cbc-md5, no salt\n" +
+ "MKey: vno 1\n" +
+ "Attributes:\n" +
+ "Policy: [none]", (service) ? "service/host" : "user"))
+ .anyTimes();
+
+ expect(handler.executeCommand(arrayContains(new String[]{"kadmin", "get_principal"}), anyObject(Map.class), anyObject(KDCKerberosOperationHandler.InteractivePasswordHandler.class)))
+ .andReturn(result)
+ .anyTimes();
}
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureActionTest.java
new file mode 100644
index 0000000..1bad219
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/CreateAndConfigureActionTest.java
@@ -0,0 +1,357 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.persistence.EntityManager;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.H2DatabaseCleaner;
+import org.apache.ambari.server.ServiceComponentNotFoundException;
+import org.apache.ambari.server.ServiceNotFoundException;
+import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleCommandFactory;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.orm.GuiceJpaInitializer;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.OrmTestHelper;
+import org.apache.ambari.server.orm.dao.RequestDAO;
+import org.apache.ambari.server.orm.dao.UpgradeDAO;
+import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
+import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.UpgradeEntity;
+import org.apache.ambari.server.orm.entities.UpgradeHistoryEntity;
+import org.apache.ambari.server.serveraction.ServerAction;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigFactory;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.Host;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceComponent;
+import org.apache.ambari.server.state.ServiceComponentFactory;
+import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceComponentHostFactory;
+import org.apache.ambari.server.state.ServiceFactory;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.State;
+import org.apache.ambari.server.state.stack.upgrade.ConfigUpgradeChangeDefinition.ConfigurationKeyValue;
+import org.apache.ambari.server.state.stack.upgrade.CreateAndConfigureTask;
+import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
+import org.apache.commons.lang.StringUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.gson.Gson;
+import com.google.inject.Guice;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+
+/**
+ * Tests upgrade-related server side actions
+ */
+public class CreateAndConfigureActionTest {
+
+ @Inject
+ private Injector m_injector;
+
+ @Inject
+ private OrmTestHelper m_helper;
+
+ @Inject
+ private HostRoleCommandFactory hostRoleCommandFactory;
+
+ @Inject
+ private ServiceFactory serviceFactory;
+
+ @Inject
+ private ConfigHelper m_configHelper;
+
+ @Inject
+ private Clusters clusters;
+
+ @Inject
+ private ConfigFactory configFactory;
+
+ @Inject
+ private CreateAndConfigureAction action;
+
+ @Inject
+ private RequestDAO requestDAO;
+
+ @Inject
+ private UpgradeDAO upgradeDAO;
+
+ @Inject
+ private ServiceComponentFactory serviceComponentFactory;
+
+ @Inject
+ private ServiceComponentHostFactory serviceComponentHostFactory;
+
+ private RepositoryVersionEntity repoVersion2110;
+ private RepositoryVersionEntity repoVersion2111;
+ private RepositoryVersionEntity repoVersion2200;
+
+ private final Map<String, Map<String, String>> NO_ATTRIBUTES = new HashMap<>();
+
+ @Before
+ public void setup() throws Exception {
+ m_injector = Guice.createInjector(new InMemoryDefaultTestModule());
+ m_injector.getInstance(GuiceJpaInitializer.class);
+ m_injector.injectMembers(this);
+
+ repoVersion2110 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.1.1"), "2.1.1.0-1234");
+ repoVersion2111 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.1.1"), "2.1.1.1-5678");
+ repoVersion2200 = m_helper.getOrCreateRepositoryVersion(new StackId("HDP-2.2.0"), "2.2.0.0-1234");
+
+ makeUpgradeCluster();
+ }
+
+ @After
+ public void teardown() throws Exception {
+ H2DatabaseCleaner.clearDatabase(m_injector.getProvider(EntityManager.class).get());
+ }
+
+
+ /**
+ * Tests that a new configuration is created when upgrading within the same
+ * stack (only the repository version changes) and no configuration with the target tag exists.
+ *
+ * @throws Exception
+ */
+ @Test
+ public void testNewConfigCreatedWhenUpgradingWithoutChaningStack() throws Exception {
+ Cluster c = clusters.getCluster("c1");
+ assertEquals(1, c.getConfigsByType("zoo.cfg").size());
+
+ Map<String, String> properties = new HashMap<String, String>() {
+ {
+ put("initLimit", "10");
+ }
+ };
+
+ Config config = createConfig(c, "zoo.cfg", "version2", properties);
+
+ c.addDesiredConfig("user", Collections.singleton(config));
+ assertEquals(2, c.getConfigsByType("zoo.cfg").size());
+
+ List<ConfigurationKeyValue> configurations = new ArrayList<>();
+ ConfigurationKeyValue keyValue = new ConfigurationKeyValue();
+ configurations.add(keyValue);
+ keyValue.key = "initLimit";
+ keyValue.value = "11";
+ c.setCurrentStackVersion(repoVersion2110.getStackId());
+ c.setDesiredStackVersion(repoVersion2111.getStackId());
+
+ createUpgrade(c, repoVersion2111);
+
+ Map<String, String> commandParams = new HashMap<>();
+ commandParams.put("clusterName", "c1");
+ commandParams.put(CreateAndConfigureTask.PARAMETER_CONFIG_TYPE, "zoo.cfg");
+ commandParams.put(CreateAndConfigureTask.PARAMETER_KEY_VALUE_PAIRS, new Gson().toJson(configurations));
+
+ ExecutionCommand executionCommand = getExecutionCommand(commandParams);
+ HostRoleCommand hostRoleCommand = hostRoleCommandFactory.create(null, null,
+ null, null);
+
+ hostRoleCommand.setExecutionCommandWrapper(new ExecutionCommandWrapper(
+ executionCommand));
+
+ action.setExecutionCommand(executionCommand);
+ action.setHostRoleCommand(hostRoleCommand);
+
+ CommandReport report = action.execute(null);
+ assertNotNull(report);
+
+ assertEquals(3, c.getConfigsByType("zoo.cfg").size());
+
+ config = c.getDesiredConfigByType("zoo.cfg");
+ assertNotNull(config);
+ assertFalse(StringUtils.equals("version2", config.getTag()));
+ assertEquals("11", config.getProperties().get("initLimit"));
+ }
+
+ /**
+ * Creates a cluster using {@link #repoVersion2110} with ZooKeeper installed.
+ *
+ * @throws Exception
+ */
+ private void makeUpgradeCluster() throws Exception {
+ String clusterName = "c1";
+ String hostName = "h1";
+
+ clusters.addCluster(clusterName, repoVersion2110.getStackId());
+
+ Cluster c = clusters.getCluster(clusterName);
+
+ // add a host component
+ clusters.addHost(hostName);
+ Host host = clusters.getHost(hostName);
+ Map<String, String> hostAttributes = new HashMap<>();
+ hostAttributes.put("os_family", "redhat");
+ hostAttributes.put("os_release_version", "6");
+ host.setHostAttributes(hostAttributes);
+
+ clusters.mapHostToCluster(hostName, clusterName);
+
+ // !!! very important, otherwise the loops that walk the list of installed
+ // service properties will not run!
+ Service zk = installService(c, "ZOOKEEPER", repoVersion2110);
+ addServiceComponent(c, zk, "ZOOKEEPER_SERVER");
+ addServiceComponent(c, zk, "ZOOKEEPER_CLIENT");
+ createNewServiceComponentHost(c, "ZOOKEEPER", "ZOOKEEPER_SERVER", hostName);
+ createNewServiceComponentHost(c, "ZOOKEEPER", "ZOOKEEPER_CLIENT", hostName);
+
+ Map<String, String> properties = new HashMap<String, String>() {
+ {
+ put("initLimit", "10");
+ }
+ };
+
+ Config config = createConfig(c, "zoo.cfg", "version1", properties);
+
+ c.addDesiredConfig("user", Collections.singleton(config));
+
+ // verify that our configs are there
+ String tickTime = m_configHelper.getPropertyValueFromStackDefinitions(c, "zoo.cfg", "tickTime");
+ assertNotNull(tickTime);
+ }
+
+ /**
+ * Installs a service in the cluster.
+ *
+ * @param cluster
+ * @param serviceName
+ * @return
+ * @throws AmbariException
+ */
+ private Service installService(Cluster cluster, String serviceName,
+ RepositoryVersionEntity repositoryVersion) throws AmbariException {
+ Service service = null;
+
+ try {
+ service = cluster.getService(serviceName);
+ } catch (ServiceNotFoundException e) {
+ service = serviceFactory.createNew(cluster, null, null, serviceName, "", repositoryVersion);
+ cluster.addService(service);
+ }
+
+ return service;
+ }
+
+ private ServiceComponent addServiceComponent(Cluster cluster, Service service,
+ String componentName) throws AmbariException {
+ ServiceComponent serviceComponent = null;
+ try {
+ serviceComponent = service.getServiceComponent(componentName);
+ } catch (ServiceComponentNotFoundException e) {
+ serviceComponent = serviceComponentFactory.createNew(service, componentName);
+ service.addServiceComponent(serviceComponent);
+ serviceComponent.setDesiredState(State.INSTALLED);
+ }
+
+ return serviceComponent;
+ }
+
+ private ServiceComponentHost createNewServiceComponentHost(Cluster cluster, String serviceName,
+ String svcComponent, String hostName) throws AmbariException {
+ Assert.assertNotNull(cluster.getConfigGroups());
+ Service s = cluster.getService(serviceName);
+ ServiceComponent sc = addServiceComponent(cluster, s, svcComponent);
+
+ ServiceComponentHost sch = serviceComponentHostFactory.createNew(sc, hostName);
+
+ sc.addServiceComponentHost(sch);
+ sch.setDesiredState(State.INSTALLED);
+ sch.setState(State.INSTALLED);
+ return sch;
+ }
+
+ /**
+ * Creates an upgrade and associates it with the cluster.
+ */
+ private UpgradeEntity createUpgrade(Cluster cluster, RepositoryVersionEntity repositoryVersion)
+ throws Exception {
+
+ // create some entities for the finalize action to work with for patch
+ // history
+ RequestEntity requestEntity = new RequestEntity();
+ requestEntity.setClusterId(cluster.getClusterId());
+ requestEntity.setRequestId(1L);
+ requestEntity.setStartTime(System.currentTimeMillis());
+ requestEntity.setCreateTime(System.currentTimeMillis());
+ requestDAO.create(requestEntity);
+
+ UpgradeEntity upgradeEntity = new UpgradeEntity();
+ upgradeEntity.setId(1L);
+ upgradeEntity.setClusterId(cluster.getClusterId());
+ upgradeEntity.setRequestEntity(requestEntity);
+ upgradeEntity.setUpgradePackage("");
+ upgradeEntity.setRepositoryVersion(repositoryVersion);
+ upgradeEntity.setUpgradeType(UpgradeType.NON_ROLLING);
+
+ Map<String, Service> services = cluster.getServices();
+ for (String serviceName : services.keySet()) {
+ Service service = services.get(serviceName);
+ Map<String, ServiceComponent> components = service.getServiceComponents();
+ for (String componentName : components.keySet()) {
+ UpgradeHistoryEntity history = new UpgradeHistoryEntity();
+ history.setUpgrade(upgradeEntity);
+ history.setServiceName(serviceName);
+ history.setComponentName(componentName);
+ history.setFromRepositoryVersion(service.getDesiredRepositoryVersion());
+ history.setTargetRepositoryVersion(repositoryVersion);
+ upgradeEntity.addHistory(history);
+ }
+ }
+
+ upgradeDAO.create(upgradeEntity);
+ cluster.setUpgradeEntity(upgradeEntity);
+ return upgradeEntity;
+ }
+
+ private ExecutionCommand getExecutionCommand(Map<String, String> commandParams) {
+ ExecutionCommand executionCommand = new ExecutionCommand();
+ executionCommand.setClusterName("c1");
+ executionCommand.setCommandParams(commandParams);
+ executionCommand.setRoleParams(new HashMap<String, String>());
+ executionCommand.getRoleParams().put(ServerAction.ACTION_USER_NAME, "username");
+
+ return executionCommand;
+ }
+
+ private Config createConfig(Cluster cluster, String type, String tag,
+ Map<String, String> properties) {
+ return configFactory.createNew(cluster, type, tag, properties,
+ NO_ATTRIBUTES);
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosActionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosActionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosActionTest.java
index 05dd805..4387473 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosActionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/serveraction/upgrades/PreconfigureKerberosActionTest.java
@@ -70,8 +70,11 @@ import org.apache.ambari.server.metadata.CachedRoleCommandOrderProvider;
import org.apache.ambari.server.metadata.RoleCommandOrderProvider;
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.dao.HostDAO;
import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
import org.apache.ambari.server.orm.dao.KerberosPrincipalDAO;
+import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.orm.entities.KerberosKeytabEntity;
import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.orm.entities.UpgradeEntity;
import org.apache.ambari.server.security.encryption.CredentialStoreService;
@@ -179,6 +182,12 @@ public class PreconfigureKerberosActionTest extends EasyMockSupport {
Injector injector = getInjector();
+ HostDAO hostDAO = injector.getInstance(HostDAO.class);
+ EntityManager entityManager = injector.getInstance(EntityManager.class);
+
+ expect(hostDAO.findByName(anyString())).andReturn(createNiceMock(HostEntity.class)).anyTimes();
+ expect(entityManager.find(eq(KerberosKeytabEntity.class), anyString())).andReturn(createNiceMock(KerberosKeytabEntity.class)).anyTimes();
+
ExecutionCommand executionCommand = createMockExecutionCommand(getDefaultCommandParams());
UpgradeEntity upgradeProgress = createMock(UpgradeEntity.class);
@@ -590,6 +599,7 @@ public class PreconfigureKerberosActionTest extends EasyMockSupport {
bind(Clusters.class).toInstance(createMock(Clusters.class));
bind(StackAdvisorHelper.class).toInstance(createMock(StackAdvisorHelper.class));
bind(ConfigHelper.class).toInstance(createMock(ConfigHelper.class));
+ bind(HostDAO.class).toInstance(createMock(HostDAO.class));
}
});
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
index dbdd043..13c32cf 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/ServiceModuleTest.java
@@ -499,6 +499,36 @@ public class ServiceModuleTest {
}
@Test
+ public void testResolve_ServerActionDirectory() throws Exception {
+ File serverActions = new File("server_actions");
+
+ // check directory specified in child only
+ ServiceInfo info = new ServiceInfo();
+ ServiceInfo parentInfo = new ServiceInfo();
+ ServiceModule child = createServiceModule(info);
+ ServiceModule parent = createServiceModule(parentInfo);
+ child.getModuleInfo().setServerActionsFolder(serverActions);
+ resolveService(child, parent);
+ assertEquals(serverActions.getPath(), child.getModuleInfo().getServerActionsFolder().getPath());
+
+ // check directory specified in parent only
+ child = createServiceModule(info);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setServerActionsFolder(serverActions);
+ resolveService(child, parent);
+ assertEquals(serverActions.getPath(), child.getModuleInfo().getServerActionsFolder().getPath());
+
+ // check directory set in both
+ info.setServerActionsFolder(serverActions);
+ child = createServiceModule(info);
+ child.getModuleInfo().setServerActionsFolder(serverActions);
+ parent = createServiceModule(parentInfo);
+ parent.getModuleInfo().setServerActionsFolder(new File("other"));
+ resolveService(child, parent);
+ assertEquals(serverActions.getPath(), child.getModuleInfo().getServerActionsFolder().getPath());
+ }
+
+ @Test
public void testResolve_CustomCommands() throws Exception {
List<CustomCommandDefinition> customCommands = new ArrayList<>();
CustomCommandDefinition cmd1 = new CustomCommandDefinition();
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
index f3975be..591135b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/StackManagerExtensionTest.java
@@ -142,6 +142,9 @@ public class StackManagerExtensionTest {
File checks = oozie.getChecksFolder();
assertNotNull(checks);
assertTrue("Checks dir is " + checks.getPath(), checks.getPath().contains("extensions/EXT/0.1/services/OOZIE2/checks"));
+ File serverActions = oozie.getServerActionsFolder();
+ assertNotNull(serverActions);
+ assertTrue("Server actions dir is " + serverActions.getPath(), serverActions.getPath().contains("extensions/EXT/0.1/services/OOZIE2/server_actions"));
List<ThemeInfo> themes = oozie.getThemes();
assertNotNull(themes);
assertTrue("Number of themes is " + themes.size(), themes.size() == 1);
@@ -160,6 +163,9 @@ public class StackManagerExtensionTest {
checks = oozie.getChecksFolder();
assertNotNull(checks);
assertTrue("Checks dir is " + checks.getPath(), checks.getPath().contains("extensions/EXT/0.1/services/OOZIE2/checks"));
+ serverActions = oozie.getServerActionsFolder();
+ assertNotNull(serverActions);
+ assertTrue("Server actions dir is " + serverActions.getPath(), serverActions.getPath().contains("extensions/EXT/0.1/services/OOZIE2/server_actions"));
themes = oozie.getThemes();
assertNotNull(themes);
assertTrue("Number of themes is " + themes.size(), themes.size() == 0);
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index ff28eb7..158c47d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -22,6 +22,8 @@ import static org.easymock.EasyMock.createStrictMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import java.sql.SQLException;
import java.util.ArrayList;
@@ -1217,4 +1219,49 @@ public class ConfigHelperTest {
verify(mockAmbariMetaInfo, mockStackVersion, mockServiceInfo, mockPropertyInfo1, mockPropertyInfo2);
}
}
+
+ public static class RunWithoutModules {
+ @Test
+ public void nullsAreEqual() {
+ assertTrue(ConfigHelper.valuesAreEqual(null, null));
+ }
+
+ @Test
+ public void equalStringsAreEqual() {
+ assertTrue(ConfigHelper.valuesAreEqual("asdf", "asdf"));
+ assertTrue(ConfigHelper.valuesAreEqual("qwerty", "qwerty"));
+ }
+
+ @Test
+ public void nullIsNotEqualWithNonNull() {
+ assertFalse(ConfigHelper.valuesAreEqual(null, "asdf"));
+ assertFalse(ConfigHelper.valuesAreEqual("asdf", null));
+ }
+
+ @Test
+ public void equalNumbersInDifferentFormsAreEqual() {
+ assertTrue(ConfigHelper.valuesAreEqual("1.234", "1.2340"));
+ assertTrue(ConfigHelper.valuesAreEqual("12.34", "1.234e1"));
+ assertTrue(ConfigHelper.valuesAreEqual("123L", "123l"));
+ assertTrue(ConfigHelper.valuesAreEqual("-1.234", "-1.2340"));
+ assertTrue(ConfigHelper.valuesAreEqual("-12.34", "-1.234e1"));
+ assertTrue(ConfigHelper.valuesAreEqual("-123L", "-123l"));
+ assertTrue(ConfigHelper.valuesAreEqual("1f", "1.0f"));
+ assertTrue(ConfigHelper.valuesAreEqual("0", "000"));
+
+ // these are treated as different by NumberUtils (due to different types not being equal)
+ assertTrue(ConfigHelper.valuesAreEqual("123", "123L"));
+ assertTrue(ConfigHelper.valuesAreEqual("0", "0.0"));
+ }
+
+ @Test
+ public void differentNumbersAreNotEqual() {
+ assertFalse(ConfigHelper.valuesAreEqual("1.234", "1.2341"));
+ assertFalse(ConfigHelper.valuesAreEqual("123L", "124L"));
+ assertFalse(ConfigHelper.valuesAreEqual("-1.234", "1.234"));
+ assertFalse(ConfigHelper.valuesAreEqual("-123L", "123L"));
+ assertFalse(ConfigHelper.valuesAreEqual("-1.234", "-1.2341"));
+ assertFalse(ConfigHelper.valuesAreEqual("-123L", "-124L"));
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
index 3ec6943..3056dd1 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertReceivedListenerTest.java
@@ -30,8 +30,8 @@ import javax.persistence.EntityManager;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.H2DatabaseCleaner;
-import org.apache.ambari.server.controller.RootServiceResponseFactory.Components;
-import org.apache.ambari.server.controller.RootServiceResponseFactory.Services;
+import org.apache.ambari.server.controller.RootComponent;
+import org.apache.ambari.server.controller.RootService;
import org.apache.ambari.server.events.AlertReceivedEvent;
import org.apache.ambari.server.events.AlertStateChangeEvent;
import org.apache.ambari.server.events.listeners.alerts.AlertReceivedListener;
@@ -335,8 +335,8 @@ public class AlertReceivedListenerTest {
@Test
public void testAgentAlertFromInvalidHost() {
String definitionName = ALERT_DEFINITION + "1";
- String serviceName = Services.AMBARI.name();
- String componentName = Components.AMBARI_AGENT.name();
+ String serviceName = RootService.AMBARI.name();
+ String componentName = RootComponent.AMBARI_AGENT.name();
Alert alert = new Alert(definitionName, null, serviceName, componentName, HOST1,
AlertState.OK);
@@ -374,8 +374,8 @@ public class AlertReceivedListenerTest {
@Test
public void testAmbariServerValidAlerts() {
String definitionName = ALERT_DEFINITION + "1";
- String serviceName = Services.AMBARI.name();
- String componentName = Components.AMBARI_SERVER.name();
+ String serviceName = RootService.AMBARI.name();
+ String componentName = RootComponent.AMBARI_SERVER.name();
Alert alert = new Alert(definitionName, null, serviceName, componentName, HOST1,
AlertState.OK);
@@ -415,8 +415,8 @@ public class AlertReceivedListenerTest {
@Test
public void testMissingClusterAndInvalidHost() {
String definitionName = ALERT_DEFINITION + "1";
- String serviceName = Services.AMBARI.name();
- String componentName = Components.AMBARI_AGENT.name();
+ String serviceName = RootService.AMBARI.name();
+ String componentName = RootComponent.AMBARI_AGENT.name();
Alert alert1 = new Alert(definitionName, null, serviceName, componentName, HOST1,
AlertState.OK);
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
index bc8222c..c3db717 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/alerts/AlertStateChangedEventTest.java
@@ -24,7 +24,7 @@ import java.util.List;
import java.util.Set;
import org.apache.ambari.server.H2DatabaseCleaner;
-import org.apache.ambari.server.controller.RootServiceResponseFactory.Services;
+import org.apache.ambari.server.controller.RootService;
import org.apache.ambari.server.events.AggregateAlertRecalculateEvent;
import org.apache.ambari.server.events.AlertEvent;
import org.apache.ambari.server.events.AlertStateChangeEvent;
@@ -501,7 +501,7 @@ public class AlertStateChangedEventTest extends EasyMockSupport {
// create the definition for the AMBARI service
AlertDefinitionEntity definition = createNiceMock(AlertDefinitionEntity.class);
EasyMock.expect(definition.getDefinitionId()).andReturn(1L).anyTimes();
- EasyMock.expect(definition.getServiceName()).andReturn(Services.AMBARI.name()).anyTimes();
+ EasyMock.expect(definition.getServiceName()).andReturn(RootService.AMBARI.name()).anyTimes();
EasyMock.expect(definition.getLabel()).andReturn("ambari-foo-alert").anyTimes();
EasyMock.expect(definition.getDescription()).andReturn("Ambari Foo Alert").anyTimes();
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java
index 8433518..9fe6146 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/repository/VersionDefinitionTest.java
@@ -28,6 +28,7 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.lang.reflect.Field;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -39,7 +40,10 @@ import org.apache.ambari.server.state.RepositoryType;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.ServiceInfo;
import org.apache.ambari.server.state.StackInfo;
+import org.apache.ambari.server.state.stack.RepoTag;
import org.apache.ambari.server.state.stack.RepositoryXml;
+import org.apache.ambari.server.state.stack.RepositoryXml.Os;
+import org.apache.ambari.server.state.stack.RepositoryXml.Repo;
import org.apache.commons.io.FileUtils;
import org.junit.Test;
@@ -185,15 +189,41 @@ public class VersionDefinitionTest {
public void testSerialization() throws Exception {
File f = new File("src/test/resources/version_definition_test_all_services.xml");
-
VersionDefinitionXml xml = VersionDefinitionXml.load(f.toURI().toURL());
-
String xmlString = xml.toXml();
-
xml = VersionDefinitionXml.load(xmlString);
assertNotNull(xml.release.build);
assertEquals("1234", xml.release.build);
+
+ f = new File("src/test/resources/version_definition_with_tags.xml");
+ xml = VersionDefinitionXml.load(f.toURI().toURL());
+ xmlString = xml.toXml();
+
+ xml = VersionDefinitionXml.load(xmlString);
+
+ assertEquals(2, xml.repositoryInfo.getOses().size());
+ List<Repo> repos = null;
+ for (Os os : xml.repositoryInfo.getOses()) {
+ if (os.getFamily().equals("redhat6")) {
+ repos = os.getRepos();
+ }
+ }
+ assertNotNull(repos);
+ assertEquals(3, repos.size());
+
+ Repo found = null;
+ for (Repo repo : repos) {
+ if (repo.getRepoName().equals("HDP-GPL")) {
+ found = repo;
+ break;
+ }
+ }
+
+ assertNotNull(found);
+ assertNotNull(found.getTags());
+ assertEquals(1, found.getTags().size());
+ assertEquals(RepoTag.GPL, found.getTags().iterator().next());
}
@@ -425,11 +455,52 @@ public class VersionDefinitionTest {
summary = xml.getClusterSummary(cluster);
assertEquals(0, summary.getAvailableServiceNames().size());
+ f = new File("src/test/resources/version_definition_test_maint.xml");
+ xml = VersionDefinitionXml.load(f.toURI().toURL());
+ xml.release.repositoryType = RepositoryType.STANDARD;
+ xml.availableServices = Collections.emptyList();
+ summary = xml.getClusterSummary(cluster);
+ assertEquals(2, summary.getAvailableServiceNames().size());
+
f = new File("src/test/resources/version_definition_test_maint_partial.xml");
xml = VersionDefinitionXml.load(f.toURI().toURL());
summary = xml.getClusterSummary(cluster);
assertEquals(1, summary.getAvailableServiceNames().size());
+ }
+
+ @Test
+ public void testAvailableBuildVersion() throws Exception {
+ Cluster cluster = createNiceMock(Cluster.class);
+ RepositoryVersionEntity repositoryVersion = createNiceMock(RepositoryVersionEntity.class);
+ expect(repositoryVersion.getVersion()).andReturn("2.3.4.1-1").atLeastOnce();
+
+ Service serviceHdfs = createNiceMock(Service.class);
+ expect(serviceHdfs.getName()).andReturn("HDFS").atLeastOnce();
+ expect(serviceHdfs.getDisplayName()).andReturn("HDFS").atLeastOnce();
+ expect(serviceHdfs.getDesiredRepositoryVersion()).andReturn(repositoryVersion).atLeastOnce();
+
+ Service serviceHBase = createNiceMock(Service.class);
+ expect(serviceHBase.getName()).andReturn("HBASE").atLeastOnce();
+ expect(serviceHBase.getDisplayName()).andReturn("HBase").atLeastOnce();
+ expect(serviceHBase.getDesiredRepositoryVersion()).andReturn(repositoryVersion).atLeastOnce();
+
+ // !!! should never be accessed as it's not in any VDF
+ Service serviceAMS = createNiceMock(Service.class);
+
+ expect(cluster.getServices()).andReturn(ImmutableMap.<String, Service>builder()
+ .put("HDFS", serviceHdfs)
+ .put("HBASE", serviceHBase)
+ .put("AMBARI_METRICS", serviceAMS).build()).atLeastOnce();
+
+ replay(cluster, repositoryVersion, serviceHdfs, serviceHBase);
+
+ File f = new File("src/test/resources/version_definition_test_maint_partial.xml");
+ VersionDefinitionXml xml = VersionDefinitionXml.load(f.toURI().toURL());
+ xml.release.version = "2.3.4.1";
+ xml.release.build = "2";
+ ClusterVersionSummary summary = xml.getClusterSummary(cluster);
+ assertEquals(1, summary.getAvailableServiceNames().size());
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java
index 422c0ec..ead035c 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelperTest.java
@@ -53,6 +53,6 @@ public class RepositoryVersionHelperTest {
repositories.add(repository);
final String serialized = helper.serializeOperatingSystems(repositories);
- Assert.assertEquals("[{\"OperatingSystems/ambari_managed_repositories\":true,\"repositories\":[{\"Repositories/base_url\":\"baseurl\",\"Repositories/repo_id\":\"repoId\",\"Repositories/unique\":true}],\"OperatingSystems/os_type\":\"os\"}]", serialized);
+ Assert.assertEquals("[{\"OperatingSystems/ambari_managed_repositories\":true,\"repositories\":[{\"Repositories/base_url\":\"baseurl\",\"Repositories/repo_id\":\"repoId\",\"Repositories/unique\":true,\"Repositories/tags\":[]}],\"OperatingSystems/os_type\":\"os\"}]", serialized);
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index 08d6a7f..3cf16bc 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -78,6 +78,9 @@ import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalType;
import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
import org.apache.ambari.server.state.stack.OsFamily;
import org.apache.commons.io.FileUtils;
@@ -654,7 +657,7 @@ public class UpgradeCatalog260Test {
expect(artifactEntity.getArtifactData()).andReturn(kerberosDescriptor.toMap()).once();
Capture<Map<String, Object>> captureMap = newCapture();
- expect(artifactEntity.getForeignKeys()).andReturn(Collections.singletonMap("cluster", "2"));
+ expect(artifactEntity.getForeignKeys()).andReturn(Collections.singletonMap("cluster", "2")).times(2);
artifactEntity.setArtifactData(capture(captureMap));
expectLastCall().once();
@@ -671,11 +674,26 @@ public class UpgradeCatalog260Test {
expect(config.getTag()).andReturn("version1").anyTimes();
expect(config.getType()).andReturn("ranger-kms-audit").anyTimes();
+ Map<String, String> hsiProperties = new HashMap<>();
+ hsiProperties.put("hive.llap.daemon.keytab.file", "/etc/security/keytabs/hive.service.keytab");
+ hsiProperties.put("hive.llap.zk.sm.keytab.file", "/etc/security/keytabs/hive.llap.zk.sm.keytab");
+
+ Config hsiConfig = createMock(Config.class);
+ expect(hsiConfig.getProperties()).andReturn(hsiProperties).anyTimes();
+ expect(hsiConfig.getPropertiesAttributes()).andReturn(Collections.<String, Map<String, String>>emptyMap()).anyTimes();
+ expect(hsiConfig.getTag()).andReturn("version1").anyTimes();
+ expect(hsiConfig.getType()).andReturn("hive-interactive-site").anyTimes();
+
Config newConfig = createMock(Config.class);
expect(newConfig.getTag()).andReturn("version2").anyTimes();
expect(newConfig.getType()).andReturn("ranger-kms-audit").anyTimes();
+ Config newHsiConfig = createMock(Config.class);
+ expect(newHsiConfig.getTag()).andReturn("version2").anyTimes();
+ expect(newHsiConfig.getType()).andReturn("hive-interactive-site").anyTimes();
+
ServiceConfigVersionResponse response = createMock(ServiceConfigVersionResponse.class);
+ ServiceConfigVersionResponse response1 = createMock(ServiceConfigVersionResponse.class);
StackId stackId = createMock(StackId.class);
@@ -690,6 +708,14 @@ public class UpgradeCatalog260Test {
expect(cluster.getConfig(eq("ranger-kms-audit"), anyString())).andReturn(newConfig).once();
expect(cluster.addDesiredConfig("ambari-upgrade", Collections.singleton(newConfig), "Updated ranger-kms-audit during Ambari Upgrade from 2.5.2 to 2.6.0.")).andReturn(response).once();
+ //HIVE
+ expect(cluster.getDesiredConfigByType("hive-site")).andReturn(hsiConfig).anyTimes();
+ expect(cluster.getDesiredConfigByType("hive-interactive-site")).andReturn(hsiConfig).anyTimes();
+ expect(cluster.getConfigsByType("hive-interactive-site")).andReturn(Collections.singletonMap("version1", hsiConfig)).anyTimes();
+ expect(cluster.getServiceByConfigType("hive-interactive-site").getName()).andReturn("HIVE").anyTimes();
+ expect(cluster.getConfig(eq("hive-interactive-site"), anyString())).andReturn(newHsiConfig).anyTimes();
+
+
final Clusters clusters = injector.getInstance(Clusters.class);
expect(clusters.getCluster(2L)).andReturn(cluster).anyTimes();
@@ -702,12 +728,17 @@ public class UpgradeCatalog260Test {
.andReturn(null)
.once();
- replay(artifactDAO, artifactEntity, cluster, clusters, config, newConfig, response, controller, stackId);
+ Capture<? extends Map<String, String>> captureHsiProperties = newCapture();
+
+ expect(controller.createConfig(eq(cluster), eq(stackId), eq("hive-interactive-site"), capture(captureHsiProperties), anyString(), anyObject(Map.class), 1L))
+ .andReturn(null)
+ .anyTimes();
+
+ replay(artifactDAO, artifactEntity, cluster, clusters, config, newConfig, hsiConfig, newHsiConfig, response, response1, controller, stackId);
UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class);
upgradeCatalog260.updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
verify(artifactDAO, artifactEntity, cluster, clusters, config, newConfig, response, controller, stackId);
-
KerberosDescriptor kerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(captureMap.getValue());
Assert.assertNotNull(kerberosDescriptorUpdated);
@@ -731,6 +762,39 @@ public class UpgradeCatalog260Test {
Assert.assertTrue(captureProperties.hasCaptured());
Map<String, String> newProperties = captureProperties.getValue();
Assert.assertEquals("correct_value@EXAMPLE.COM", newProperties.get("xasecure.audit.jaas.Client.option.principal"));
+
+ // YARN's NodeManager identities (1). 'llap_zk_hive' and (2). 'llap_task_hive' checks after modifications.
+ Map<String, List<String>> identitiesMap = new HashMap<>();
+ identitiesMap.put("llap_zk_hive", new ArrayList<String>() {{
+ add("hive-interactive-site/hive.llap.zk.sm.keytab.file");
+ add("hive-interactive-site/hive.llap.zk.sm.principal");
+ }});
+ identitiesMap.put("llap_task_hive", new ArrayList<String>() {{
+ add("hive-interactive-site/hive.llap.task.keytab.file");
+ add("hive-interactive-site/hive.llap.task.principal");
+ }});
+ for (String llapIdentity : identitiesMap.keySet()) {
+ KerberosIdentityDescriptor yarnKerberosIdentityDescriptor = kerberosDescriptorUpdated.getService("YARN").getComponent("NODEMANAGER").getIdentity(llapIdentity);
+ Assert.assertNotNull(yarnKerberosIdentityDescriptor);
+ Assert.assertEquals("/HIVE/HIVE_SERVER/hive_server_hive", yarnKerberosIdentityDescriptor.getReference());
+
+ KerberosKeytabDescriptor yarnKerberosKeytabDescriptor = yarnKerberosIdentityDescriptor.getKeytabDescriptor();
+ Assert.assertNotNull(yarnKerberosKeytabDescriptor);
+
+ Assert.assertEquals(null, yarnKerberosKeytabDescriptor.getGroupAccess());
+ Assert.assertEquals(null, yarnKerberosKeytabDescriptor.getGroupName());
+ Assert.assertEquals(null, yarnKerberosKeytabDescriptor.getOwnerAccess());
+ Assert.assertEquals(null, yarnKerberosKeytabDescriptor.getOwnerName());
+ Assert.assertEquals(null, yarnKerberosKeytabDescriptor.getFile());
+ Assert.assertEquals(identitiesMap.get(llapIdentity).get(0), yarnKerberosKeytabDescriptor.getConfiguration());
+
+ KerberosPrincipalDescriptor yarnKerberosPrincipalDescriptor = yarnKerberosIdentityDescriptor.getPrincipalDescriptor();
+ Assert.assertNotNull(yarnKerberosPrincipalDescriptor);
+ Assert.assertEquals(null, yarnKerberosPrincipalDescriptor.getName());
+ Assert.assertEquals(KerberosPrincipalType.SERVICE, yarnKerberosPrincipalDescriptor.getType());
+ Assert.assertEquals(null, yarnKerberosPrincipalDescriptor.getValue());
+ Assert.assertEquals(identitiesMap.get(llapIdentity).get(1), yarnKerberosPrincipalDescriptor.getConfiguration());
+ }
}
@Test
@@ -794,6 +858,72 @@ public class UpgradeCatalog260Test {
}
@Test
+ public void testUpdateHiveConfigs() throws Exception {
+
+ Map<String, String> oldProperties = new HashMap<String, String>() {
+ {
+ put("hive.llap.zk.sm.keytab.file", "/etc/security/keytabs/hive.llap.zk.sm.keytab");
+ put("hive.llap.daemon.keytab.file", "/etc/security/keytabs/hive.service.keytab");
+ put("hive.llap.task.keytab.file", "/etc/security/keytabs/hive.llap.task.keytab");
+ }
+ };
+ Map<String, String> newProperties = new HashMap<String, String>() {
+ {
+ put("hive.llap.zk.sm.keytab.file", "/etc/security/keytabs/hive.service.keytab");
+ put("hive.llap.daemon.keytab.file", "/etc/security/keytabs/hive.service.keytab");
+ put("hive.llap.task.keytab.file", "/etc/security/keytabs/hive.service.keytab");
+ }
+ };
+
+ EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+ Clusters clusters = easyMockSupport.createNiceMock(Clusters.class);
+ final Cluster cluster = easyMockSupport.createNiceMock(Cluster.class);
+ Config mockHsiConfigs = easyMockSupport.createNiceMock(Config.class);
+
+ expect(clusters.getClusters()).andReturn(new HashMap<String, Cluster>() {{
+ put("normal", cluster);
+ }}).once();
+ expect(cluster.getDesiredConfigByType("hive-interactive-site")).andReturn(mockHsiConfigs).atLeastOnce();
+ expect(mockHsiConfigs.getProperties()).andReturn(oldProperties).anyTimes();
+
+ Injector injector = easyMockSupport.createNiceMock(Injector.class);
+ expect(injector.getInstance(Gson.class)).andReturn(null).anyTimes();
+ expect(injector.getInstance(MaintenanceStateHelper.class)).andReturn(null).anyTimes();
+
+ replay(injector, clusters, mockHsiConfigs, cluster);
+
+ AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+ .addMockedMethod("createConfiguration")
+ .addMockedMethod("getClusters", new Class[] { })
+ .addMockedMethod("createConfig")
+ .withConstructor(createNiceMock(ActionManager.class), clusters, injector)
+ .createNiceMock();
+
+ Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+ Capture<Map> propertiesCapture = EasyMock.newCapture();
+
+ expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+ expect(controller.getClusters()).andReturn(clusters).anyTimes();
+ expect(controller.createConfig(anyObject(Cluster.class), anyObject(StackId.class), anyString(), capture(propertiesCapture), anyString(),
+ anyObject(Map.class), 1L)).andReturn(createNiceMock(Config.class)).once();
+ replay(controller, injector2);
+
+ // This tests the update of HSI config 'hive.llap.daemon.keytab.file'.
+ UpgradeCatalog260 upgradeCatalog260 = new UpgradeCatalog260(injector2);
+ // Set 'isYarnKerberosDescUpdated' value to true, implying kerberos descriptor was updated.
+ upgradeCatalog260.updateYarnKerberosDescUpdatedList("hive.llap.zk.sm.keytab.file");
+ upgradeCatalog260.updateYarnKerberosDescUpdatedList("hive.llap.task.keytab.file");
+
+ upgradeCatalog260.updateHiveConfigs();
+
+ easyMockSupport.verifyAll();
+
+ Map<String, String> updatedProperties = propertiesCapture.getValue();
+ assertTrue(Maps.difference(newProperties, updatedProperties).areEqual());
+ }
+
+ @Test
public void testHDFSWidgetUpdate() throws Exception {
final Clusters clusters = createNiceMock(Clusters.class);
final Cluster cluster = createNiceMock(Cluster.class);