Posted to commits@ambari.apache.org by ol...@apache.org on 2017/02/25 19:53:48 UTC

ambari git commit: AMBARI-20152. Use storm user instead of nimbus user for ranger audit (oleewere)

Repository: ambari
Updated Branches:
  refs/heads/branch-2.5 c9f9215e5 -> f7068818a


AMBARI-20152. Use storm user instead of nimbus user for ranger audit (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f7068818
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f7068818
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f7068818

Branch: refs/heads/branch-2.5
Commit: f7068818a1d8576b1c91d6b487699adb825b6608
Parents: c9f9215
Author: oleewere <ol...@gmail.com>
Authored: Thu Feb 23 21:42:13 2017 +0100
Committer: oleewere <ol...@gmail.com>
Committed: Sat Feb 25 20:03:16 2017 +0100

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog250.java       |  91 ++++++++
 .../ATLAS/0.1.0.2.3/kerberos.json               |   5 +-
 .../ATLAS/0.7.0.2.5/kerberos.json               |   5 +-
 .../LOGSEARCH/0.5.0/kerberos.json               |   5 +-
 .../common-services/RANGER/0.6.0/kerberos.json  |   5 +-
 .../common-services/STORM/1.0.1/kerberos.json   |   2 +-
 .../server/upgrade/UpgradeCatalog250Test.java   | 125 +++++++++--
 ...test_kerberos_descriptor_2_5_infra_solr.json | 217 +++++++++++++++++++
 8 files changed, 427 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
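
In short: the stock kerberos descriptors for ATLAS, LOGSEARCH and RANGER now attach the shared /AMBARI_INFRA/INFRA_SOLR/infra-solr identity only "when" the AMBARI_INFRA service is actually part of the cluster, the STORM descriptor points the Ranger audit principal/keytab at the headless storm user identity (/STORM/storm_components) instead of the NIMBUS service principal, and UpgradeCatalog250 rewrites already-stored kerberos descriptor artifacts to match. A condensed, illustrative sketch of the predicate-gated identity (the class and method names here are mine; the Ambari types and calls are the ones imported and used in the patch below):

  import java.util.Arrays;
  import java.util.Collections;

  import org.apache.ambari.server.collections.Predicate;
  import org.apache.ambari.server.collections.functors.ContainsPredicate;
  import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
  import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;

  class InfraSolrIdentitySketch {
    // Mirrors addInfrSolrDescriptor() in UpgradeCatalog250: attach the shared infra-solr
    // identity reference, gated by "when": { "contains": ["services", "AMBARI_INFRA"] }.
    static void addConditionalInfraSolrIdentity(KerberosComponentDescriptor component) {
      Predicate predicate = ContainsPredicate.fromMap(
          Collections.<String, Object>singletonMap(
              ContainsPredicate.NAME, Arrays.asList("services", "AMBARI_INFRA")));
      component.putIdentity(new KerberosIdentityDescriptor(
          "/AMBARI_INFRA/INFRA_SOLR/infra-solr", null, null, null, predicate));
    }
  }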


http://git-wip-us.apache.org/repos/asf/ambari/blob/f7068818/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
index c488cb6..39a129d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog250.java
@@ -24,19 +24,30 @@ import com.google.inject.Injector;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.CommandExecutionType;
+import org.apache.ambari.server.collections.Predicate;
+import org.apache.ambari.server.collections.functors.ContainsPredicate;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.AlertsDAO;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
 import org.apache.ambari.server.orm.entities.AlertCurrentEntity;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.AlertHistoryEntity;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosKeytabDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosPrincipalDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -179,6 +190,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
     updateRangerUrlConfigs();
     addManageServiceAutoStartPermissions();
     addManageAlertNotificationsPermissions();
+    updateKerberosDescriptorArtifacts();
   }
 
   /**
@@ -380,6 +392,82 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
     }
   }
 
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  protected void updateKerberosDescriptorArtifact(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
+    if (artifactEntity != null) {
+      Map<String, Object> data = artifactEntity.getArtifactData();
+
+      if (data != null) {
+        final KerberosDescriptor kerberosDescriptor = new KerberosDescriptorFactory().createInstance(data);
+
+        if (kerberosDescriptor != null) {
+          KerberosServiceDescriptor logSearchKerberosDescriptor = kerberosDescriptor.getService("LOGSEARCH");
+          KerberosServiceDescriptor atlasKerberosDescriptor = kerberosDescriptor.getService("ATLAS");
+          KerberosServiceDescriptor rangerKerberosDescriptor = kerberosDescriptor.getService("RANGER");
+          addInfrSolrDescriptor(artifactDAO, artifactEntity, kerberosDescriptor, atlasKerberosDescriptor, "ATLAS_SERVER");
+          addInfrSolrDescriptor(artifactDAO, artifactEntity, kerberosDescriptor, logSearchKerberosDescriptor, "LOGSEARCH_SERVER");
+          addInfrSolrDescriptor(artifactDAO, artifactEntity, kerberosDescriptor, rangerKerberosDescriptor, "RANGER_ADMIN");
+          KerberosServiceDescriptor stormKerberosDescriptor = kerberosDescriptor.getService("STORM");
+          if (stormKerberosDescriptor != null) {
+            KerberosComponentDescriptor componentDescriptor = stormKerberosDescriptor.getComponent("NIMBUS");
+            if (componentDescriptor != null) {
+              KerberosIdentityDescriptor origIdentityDescriptor = componentDescriptor.getIdentity("/STORM/NIMBUS/nimbus_server");
+              if (origIdentityDescriptor != null) {
+                KerberosPrincipalDescriptor origPrincipalDescriptor = origIdentityDescriptor.getPrincipalDescriptor();
+                KerberosPrincipalDescriptor newPrincipalDescriptor = new KerberosPrincipalDescriptor(
+                  null,
+                  null,
+                  (origPrincipalDescriptor == null) ?
+                    "ranger-storm-audit/xasecure.audit.jaas.Client.option.principal" : origPrincipalDescriptor.getConfiguration(),
+                  null
+                );
+                KerberosKeytabDescriptor origKeytabDescriptor = origIdentityDescriptor.getKeytabDescriptor();
+                KerberosKeytabDescriptor newKeytabDescriptor = new KerberosKeytabDescriptor(
+                  null,
+                  null,
+                  null,
+                  null,
+                  null,
+                  (origKeytabDescriptor == null) ?
+                    "ranger-storm-audit/xasecure.audit.jaas.Client.option.keyTab" : origKeytabDescriptor.getConfiguration(),
+                  false);
+                componentDescriptor.removeIdentity("/STORM/NIMBUS/nimbus_server");
+                componentDescriptor.putIdentity(new KerberosIdentityDescriptor("/STORM/storm_components", null, newPrincipalDescriptor, newKeytabDescriptor, null));
+
+                artifactEntity.setArtifactData(kerberosDescriptor.toMap());
+                artifactDAO.merge(artifactEntity);
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Add the /AMBARI_INFRA/INFRA_SOLR/infra-solr identity reference to a specific service component
+   */
+  private void addInfrSolrDescriptor(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity, KerberosDescriptor kerberosDescriptor,
+                                     KerberosServiceDescriptor serviceDescriptor, String componentName) {
+    if (serviceDescriptor != null) {
+      KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent(componentName);
+      if (componentDescriptor != null) {
+        KerberosIdentityDescriptor origIdentityDescriptor = componentDescriptor.getIdentity("/AMBARI_INFRA/INFRA_SOLR/infra-solr");
+        if (origIdentityDescriptor != null) {
+          LOG.info("/AMBARI_INFRA/INFRA_SOLR/infra-solr identity already exists in {} component", componentName);
+        } else {
+          Predicate predicate = ContainsPredicate.fromMap(Collections.<String, Object>singletonMap(ContainsPredicate.NAME, Arrays.asList("services", "AMBARI_INFRA")));
+          componentDescriptor.putIdentity(new KerberosIdentityDescriptor("/AMBARI_INFRA/INFRA_SOLR/infra-solr",null, null, null, predicate));
+          artifactEntity.setArtifactData(kerberosDescriptor.toMap());
+          artifactDAO.merge(artifactEntity);
+        }
+      }
+    }
+  }
+
   protected void updateTablesForZeppelinViewRemoval() throws SQLException {
     dbAccessor.executeQuery("DELETE from viewinstance WHERE view_name='ZEPPELIN{1.0.0}'", true);
     dbAccessor.executeQuery("DELETE from viewmain WHERE view_name='ZEPPELIN{1.0.0}'", true);
@@ -1062,4 +1150,7 @@ public class UpgradeCatalog250 extends AbstractUpgradeCatalog {
       updateConfigurationPropertiesForCluster(cluster, configType, updateProperty, true, false);
     }
   }
+
+
+
 }
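
For orientation, a minimal sketch of the round trip the override above performs on each stored kerberos descriptor artifact (null checks and the actual descriptor edits are elided; the helper class name is mine, the calls are the ones shown in the hunk):

  import java.util.Map;

  import org.apache.ambari.server.AmbariException;
  import org.apache.ambari.server.orm.dao.ArtifactDAO;
  import org.apache.ambari.server.orm.entities.ArtifactEntity;
  import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
  import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;

  class DescriptorRoundTripSketch {
    static void rewrite(ArtifactDAO artifactDAO, ArtifactEntity artifactEntity) throws AmbariException {
      // 1. Deserialize the stored artifact into a mutable descriptor.
      Map<String, Object> data = artifactEntity.getArtifactData();
      KerberosDescriptor descriptor = new KerberosDescriptorFactory().createInstance(data);

      // 2. Edit the descriptor in memory: add the predicate-gated infra-solr reference to
      //    ATLAS_SERVER / LOGSEARCH_SERVER / RANGER_ADMIN, and replace NIMBUS's
      //    /STORM/NIMBUS/nimbus_server identity with /STORM/storm_components.

      // 3. Serialize it back and persist the change.
      artifactEntity.setArtifactData(descriptor.toMap());
      artifactDAO.merge(artifactEntity);
    }
  }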

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7068818/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/kerberos.json b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/kerberos.json
index 0c25c95..4fe4d32 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/kerberos.json
@@ -50,7 +50,10 @@
               }
             },
             {
-              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr"
+              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr",
+              "when" : {
+                "contains" : ["services", "AMBARI_INFRA"]
+              }
             }
           ]
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7068818/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json
index d024146..e136bcf 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.2.5/kerberos.json
@@ -89,7 +89,10 @@
               "name": "/KAFKA/KAFKA_BROKER/kafka_broker"
             },
             {
-              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr"
+              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr",
+              "when" : {
+                "contains" : ["services", "AMBARI_INFRA"]
+              }
             }
           ]
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7068818/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
index 60c8afb..9e0f12d 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/kerberos.json
@@ -32,7 +32,10 @@
               }
             },
             {
-              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr"
+              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr",
+              "when" : {
+                "contains" : ["services", "AMBARI_INFRA"]
+              }
             }
           ]
         },

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7068818/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json
index c5b3201..1fc8acf 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.6.0/kerberos.json
@@ -74,7 +74,10 @@
               }
             },
             {
-              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr"
+              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr",
+              "when" : {
+                "contains" : ["services", "AMBARI_INFRA"]
+              }
             }
           ]
         },

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7068818/ambari-server/src/main/resources/common-services/STORM/1.0.1/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/1.0.1/kerberos.json b/ambari-server/src/main/resources/common-services/STORM/1.0.1/kerberos.json
index fecef7c..fa2f6db 100644
--- a/ambari-server/src/main/resources/common-services/STORM/1.0.1/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/STORM/1.0.1/kerberos.json
@@ -106,7 +106,7 @@
               }
             },
             {
-              "name": "/STORM/NIMBUS/nimbus_server",
+              "name": "/STORM/storm_components",
               "principal": {
                 "configuration": "ranger-storm-audit/xasecure.audit.jaas.Client.option.principal"
               },
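
The net effect of this stack change: Storm's Ranger audit JAAS client now resolves to the headless storm user principal (the storm_components identity) rather than the nimbus service principal. A small illustrative lookup against an updated descriptor (class and method names are mine; the getters are the ones used in the upgrade code above and the test further below):

  import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
  import org.apache.ambari.server.state.kerberos.KerberosIdentityDescriptor;

  class StormAuditLookupSketch {
    // Returns the config key the audit principal is written to, e.g.
    // "ranger-storm-audit/xasecure.audit.jaas.Client.option.principal".
    static String rangerAuditPrincipalConfig(KerberosDescriptor descriptor) {
      // After this patch the NIMBUS component references the headless storm user identity.
      KerberosIdentityDescriptor audit =
          descriptor.getService("STORM").getComponent("NIMBUS").getIdentity("/STORM/storm_components");
      return audit.getPrincipalDescriptor().getConfiguration();
    }
  }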

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7068818/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
index 3f934d7..64536cb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog250Test.java
@@ -39,10 +39,12 @@ import org.apache.ambari.server.controller.KerberosHelper;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
 import org.apache.ambari.server.orm.dao.PermissionDAO;
 import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
 import org.apache.ambari.server.orm.dao.RoleAuthorizationDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
 import org.apache.ambari.server.orm.entities.RoleAuthorizationEntity;
@@ -50,6 +52,9 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -64,7 +69,9 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 
 import javax.persistence.EntityManager;
+import java.io.File;
 import java.lang.reflect.Method;
+import java.net.URL;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -280,6 +287,7 @@ public class UpgradeCatalog250Test {
     Method updateYarnSite = UpgradeCatalog250.class.getDeclaredMethod("updateYarnSite");
     Method updateAlerts = UpgradeCatalog250.class.getDeclaredMethod("updateStormAlerts");
     Method removeAlertDuplicates = UpgradeCatalog250.class.getDeclaredMethod("removeAlertDuplicates");
+    Method updateKerberosDescriptorArtifacts = AbstractUpgradeCatalog.class.getDeclaredMethod("updateKerberosDescriptorArtifacts");
 
     UpgradeCatalog250 upgradeCatalog250 = createMockBuilder(UpgradeCatalog250.class)
       .addMockedMethod(updateAmsConfigs)
@@ -299,6 +307,7 @@ public class UpgradeCatalog250Test {
       .addMockedMethod(updateYarnSite)
       .addMockedMethod(updateAlerts)
       .addMockedMethod(removeAlertDuplicates)
+      .addMockedMethod(updateKerberosDescriptorArtifacts)
       .createMock();
 
 
@@ -353,6 +362,9 @@ public class UpgradeCatalog250Test {
     upgradeCatalog250.removeAlertDuplicates();
     expectLastCall().once();
 
+    upgradeCatalog250.updateKerberosDescriptorArtifacts();
+    expectLastCall().once();
+
     replay(upgradeCatalog250);
 
     upgradeCatalog250.executeDMLUpdates();
@@ -1097,9 +1109,9 @@ public class UpgradeCatalog250Test {
   public void testLogSearchUpdateConfigs() throws Exception {
     reset(clusters, cluster);
     expect(clusters.getClusters()).andReturn(ImmutableMap.of("normal", cluster)).once();
-    
+
     EasyMockSupport easyMockSupport = new EasyMockSupport();
-    
+
     Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
     AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
         .addMockedMethod("createConfiguration")
@@ -1110,17 +1122,17 @@ public class UpgradeCatalog250Test {
 
     expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
     expect(controller.getClusters()).andReturn(clusters).anyTimes();
-    
+
     Map<String, String> oldLogSearchProperties = ImmutableMap.of(
         "logsearch.external.auth.enabled", "true",
         "logsearch.external.auth.host_url", "host_url",
         "logsearch.external.auth.login_url", "login_url");
-    
+
     Map<String, String> expectedLogSearchProperties = ImmutableMap.of(
         "logsearch.auth.external_auth.enabled", "true",
         "logsearch.auth.external_auth.host_url", "host_url",
         "logsearch.auth.external_auth.login_url", "login_url");
-    
+
     Config mockLogSearchProperties = easyMockSupport.createNiceMock(Config.class);
     expect(cluster.getDesiredConfigByType("logsearch-properties")).andReturn(mockLogSearchProperties).atLeastOnce();
     expect(mockLogSearchProperties.getProperties()).andReturn(oldLogSearchProperties).anyTimes();
@@ -1130,10 +1142,10 @@ public class UpgradeCatalog250Test {
 
     Map<String, String> oldLogFeederEnv = ImmutableMap.of(
         "content", "infra_solr_ssl_enabled");
-    
+
     Map<String, String> expectedLogFeederEnv = ImmutableMap.of(
         "content", "logfeeder_use_ssl");
-    
+
     Config mockLogFeederEnv = easyMockSupport.createNiceMock(Config.class);
     expect(cluster.getDesiredConfigByType("logfeeder-env")).andReturn(mockLogFeederEnv).atLeastOnce();
     expect(mockLogFeederEnv.getProperties()).andReturn(oldLogFeederEnv).anyTimes();
@@ -1146,10 +1158,10 @@ public class UpgradeCatalog250Test {
         "logsearch_solr_audit_logs_zk_node", "zk_node",
         "logsearch_solr_audit_logs_zk_quorum", "zk_quorum",
         "content", "infra_solr_ssl_enabled or logsearch_ui_protocol == 'https'");
-    
+
     Map<String, String> expectedLogSearchEnv = ImmutableMap.of(
         "content", "logsearch_use_ssl");
-    
+
     Config mockLogSearchEnv = easyMockSupport.createNiceMock(Config.class);
     expect(cluster.getDesiredConfigByType("logsearch-env")).andReturn(mockLogSearchEnv).atLeastOnce();
     expect(mockLogSearchEnv.getProperties()).andReturn(oldLogSearchEnv).anyTimes();
@@ -1177,7 +1189,7 @@ public class UpgradeCatalog250Test {
         "    <param name=\"maxBackupIndex\" value=\"14\" />\n" +
         "    <layout class=\"org.apache.ambari.logsearch.appender.LogsearchConversion\" />\n" +
         "  </appender>");
-    
+
     Map<String, String> expectedLogFeederLog4j = ImmutableMap.of(
         "content",
         "    <appender name=\"rolling_file\" class=\"org.apache.log4j.RollingFileAppender\">\n" +
@@ -1202,7 +1214,7 @@ public class UpgradeCatalog250Test {
         "logfeeder_log_maxbackupindex", "12",
         "logfeeder_json_log_maxfilesize", "13",
         "logfeeder_json_log_maxbackupindex", "14");
-    
+
     Config mockLogFeederLog4j = easyMockSupport.createNiceMock(Config.class);
     expect(cluster.getDesiredConfigByType("logfeeder-log4j")).andReturn(mockLogFeederLog4j).atLeastOnce();
     expect(mockLogFeederLog4j.getProperties()).andReturn(oldLogFeederLog4j).anyTimes();
@@ -1260,7 +1272,7 @@ public class UpgradeCatalog250Test {
         "    <priority value=\"warn\"/>\n" +
         "    <appender-ref ref=\"rolling_file_json\"/>\n" +
         "  </category>");
-    
+
     Map<String, String> expectedLogSearchLog4j = new HashMap<>();
       expectedLogSearchLog4j.put("content",
         "  <appender name=\"rolling_file\" class=\"org.apache.log4j.RollingFileAppender\">\n" +
@@ -1320,7 +1332,7 @@ public class UpgradeCatalog250Test {
       expectedLogSearchLog4j.put("logsearch_audit_log_maxbackupindex", "16");
       expectedLogSearchLog4j.put("logsearch_perf_log_maxfilesize", "17");
       expectedLogSearchLog4j.put("logsearch_perf_log_maxbackupindex", "18");
-    
+
     Config mockLogSearchLog4j = easyMockSupport.createNiceMock(Config.class);
     expect(cluster.getDesiredConfigByType("logsearch-log4j")).andReturn(mockLogSearchLog4j).atLeastOnce();
     expect(mockLogSearchLog4j.getProperties()).andReturn(oldLogSearchLog4j).anyTimes();
@@ -1349,14 +1361,14 @@ public class UpgradeCatalog250Test {
     Map<String, String> updatedLogSearchLog4j = logSearchLog4jCapture.getValue();
     assertTrue(Maps.difference(expectedLogSearchLog4j, updatedLogSearchLog4j).areEqual());
   }
-  
+
   @Test
   public void testAmbariInfraUpdateConfigs() throws Exception {
     reset(clusters, cluster);
     expect(clusters.getClusters()).andReturn(ImmutableMap.of("normal", cluster)).once();
-    
+
     EasyMockSupport easyMockSupport = new EasyMockSupport();
-    
+
     Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
     AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
         .addMockedMethod("createConfiguration")
@@ -1373,13 +1385,13 @@ public class UpgradeCatalog250Test {
                    "SOLR_SSL_TRUST_STORE_PASSWORD={{infra_solr_keystore_password}}\n" +
                    "SOLR_KERB_NAME_RULES={{infra_solr_kerberos_name_rules}}\n" +
                    "SOLR_AUTHENTICATION_OPTS=\" -DauthenticationPlugin=org.apache.solr.security.KerberosPlugin -Djava.security.auth.login.config=$SOLR_JAAS_FILE -Dsolr.kerberos.principal=${SOLR_KERB_PRINCIPAL} -Dsolr.kerberos.keytab=${SOLR_KERB_KEYTAB} -Dsolr.kerberos.cookie.domain=${SOLR_HOST} -Dsolr.kerberos.name.rules=${SOLR_KERB_NAME_RULES}\"");
-    
+
     Map<String, String> expectedInfraSolrEnv = ImmutableMap.of(
         "content", "SOLR_SSL_TRUST_STORE={{infra_solr_truststore_location}}\n" +
                    "SOLR_SSL_TRUST_STORE_PASSWORD={{infra_solr_truststore_password}}\n" +
                    "SOLR_KERB_NAME_RULES=\"{{infra_solr_kerberos_name_rules}}\"\n" +
                    "SOLR_AUTHENTICATION_OPTS=\" -DauthenticationPlugin=org.apache.solr.security.KerberosPlugin -Djava.security.auth.login.config=$SOLR_JAAS_FILE -Dsolr.kerberos.principal=${SOLR_KERB_PRINCIPAL} -Dsolr.kerberos.keytab=${SOLR_KERB_KEYTAB} -Dsolr.kerberos.cookie.domain=${SOLR_HOST}\"");
-    
+
     Config mockInfraSolrEnv = easyMockSupport.createNiceMock(Config.class);
     expect(cluster.getDesiredConfigByType("infra-solr-env")).andReturn(mockInfraSolrEnv).atLeastOnce();
     expect(mockInfraSolrEnv.getProperties()).andReturn(oldInfraSolrEnv).anyTimes();
@@ -1390,13 +1402,13 @@ public class UpgradeCatalog250Test {
     Map<String, String> oldInfraSolrLog4j = ImmutableMap.of(
         "content", "log4j.appender.file.MaxFileSize=15MB\n" +
                    "log4j.appender.file.MaxBackupIndex=5\n");
-    
+
     Map<String, String> expectedInfraSolrLog4j = ImmutableMap.of(
         "content", "log4j.appender.file.MaxFileSize={{infra_log_maxfilesize}}MB\n" +
                    "log4j.appender.file.MaxBackupIndex={{infra_log_maxbackupindex}}\n",
         "infra_log_maxfilesize", "15",
         "infra_log_maxbackupindex", "5");
-    
+
     Config mockInfraSolrLog4j = easyMockSupport.createNiceMock(Config.class);
     expect(cluster.getDesiredConfigByType("infra-solr-log4j")).andReturn(mockInfraSolrLog4j).atLeastOnce();
     expect(mockInfraSolrLog4j.getProperties()).andReturn(oldInfraSolrLog4j).anyTimes();
@@ -1408,14 +1420,14 @@ public class UpgradeCatalog250Test {
         "content", "log4j.appender.file.File\u003d{{infra_client_log|default(\u0027/var/log/ambari-infra-solr-client/solr-client.log\u0027)}}\n" +
                    "log4j.appender.file.MaxFileSize=55MB\n" +
                    "log4j.appender.file.MaxBackupIndex=10\n");
-    
+
     Map<String, String> expectedInfraSolrClientLog4j = ImmutableMap.of(
         "content", "log4j.appender.file.File\u003d{{solr_client_log|default(\u0027/var/log/ambari-infra-solr-client/solr-client.log\u0027)}}\n" +
                    "log4j.appender.file.MaxFileSize={{solr_client_log_maxfilesize}}MB\n" +
                    "log4j.appender.file.MaxBackupIndex={{solr_client_log_maxbackupindex}}\n",
         "infra_client_log_maxfilesize", "55",
         "infra_client_log_maxbackupindex", "10");
-    
+
     Config mockInfraSolrClientLog4j = easyMockSupport.createNiceMock(Config.class);
     expect(cluster.getDesiredConfigByType("infra-solr-client-log4j")).andReturn(mockInfraSolrClientLog4j).atLeastOnce();
     expect(mockInfraSolrClientLog4j.getProperties()).andReturn(oldInfraSolrClientLog4j).anyTimes();
@@ -1438,7 +1450,7 @@ public class UpgradeCatalog250Test {
     Map<String, String> updatedInfraSolrClientLog4j = infraSolrClientLog4jCapture.getValue();
     assertTrue(Maps.difference(expectedInfraSolrClientLog4j, updatedInfraSolrClientLog4j).areEqual());
   }
-  
+
   @Test
   public void testUpdateHiveConfigs() throws Exception {
     reset(clusters, cluster);
@@ -1588,6 +1600,73 @@ public class UpgradeCatalog250Test {
   }
 
   @Test
+  public void testUpdateKerberosDescriptorArtifact() throws Exception {
+    final KerberosDescriptorFactory kerberosDescriptorFactory = new KerberosDescriptorFactory();
+
+    KerberosServiceDescriptor serviceDescriptor;
+
+    URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_kerberos_descriptor_2_5_infra_solr.json");
+    Assert.assertNotNull(systemResourceURL);
+
+    final KerberosDescriptor kerberosDescriptorOrig = kerberosDescriptorFactory.createInstance(new File(systemResourceURL.getFile()));
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("LOGSEARCH");
+    Assert.assertNotNull(serviceDescriptor);
+    Assert.assertNotNull(serviceDescriptor.getComponent("LOGSEARCH_SERVER"));
+    Assert.assertNotNull(serviceDescriptor.getComponent("LOGSEARCH_SERVER").getIdentity("logsearch"));
+    Assert.assertNotNull(serviceDescriptor.getComponent("LOGSEARCH_SERVER").getIdentity("/AMBARI_INFRA/INFRA_SOLR/infra-solr"));
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("ATLAS");
+    Assert.assertNotNull(serviceDescriptor);
+    Assert.assertNotNull(serviceDescriptor.getComponent("ATLAS_SERVER"));
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("RANGER");
+    Assert.assertNotNull(serviceDescriptor);
+    Assert.assertNotNull(serviceDescriptor.getComponent("RANGER_ADMIN"));
+
+    serviceDescriptor = kerberosDescriptorOrig.getService("STORM");
+    Assert.assertNotNull(serviceDescriptor);
+    Assert.assertNotNull(serviceDescriptor.getComponent("NIMBUS"));
+
+    UpgradeCatalog250 upgradeMock = createMockBuilder(UpgradeCatalog250.class).createMock();
+
+
+    ArtifactEntity artifactEntity = createNiceMock(ArtifactEntity.class);
+    expect(artifactEntity.getArtifactData())
+      .andReturn(kerberosDescriptorOrig.toMap())
+      .once();
+
+    Capture<Map<String, Object>> updateData = Capture.newInstance(CaptureType.ALL);
+    artifactEntity.setArtifactData(capture(updateData));
+    expectLastCall().times(3);
+
+    ArtifactDAO artifactDAO = createNiceMock(ArtifactDAO.class);
+    expect(artifactDAO.merge(anyObject(ArtifactEntity.class))).andReturn(artifactEntity).times(3);
+
+    replay(artifactEntity, artifactDAO, upgradeMock);
+    upgradeMock.updateKerberosDescriptorArtifact(artifactDAO, artifactEntity);
+    verify(artifactEntity, artifactDAO, upgradeMock);
+
+    KerberosDescriptor atlasKerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(updateData.getValues().get(0));
+    KerberosDescriptor rangerKerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(updateData.getValues().get(1));
+    KerberosDescriptor stormKerberosDescriptorUpdated = new KerberosDescriptorFactory().createInstance(updateData.getValues().get(2));
+
+    Assert.assertNotNull(atlasKerberosDescriptorUpdated.getIdentity("spnego"));
+    Assert.assertNotNull(atlasKerberosDescriptorUpdated.getService("LOGSEARCH"));
+    Assert.assertNotNull(atlasKerberosDescriptorUpdated.getService("LOGSEARCH").getComponent("LOGSEARCH_SERVER"));
+    Assert.assertNotNull(atlasKerberosDescriptorUpdated.getService("LOGSEARCH").getComponent("LOGSEARCH_SERVER").getIdentity("/AMBARI_INFRA/INFRA_SOLR/infra-solr"));
+    Assert.assertNotNull(atlasKerberosDescriptorUpdated.getService("ATLAS"));
+    Assert.assertNotNull(atlasKerberosDescriptorUpdated.getService("ATLAS").getComponent("ATLAS_SERVER"));
+    Assert.assertNotNull(atlasKerberosDescriptorUpdated.getService("ATLAS").getComponent("ATLAS_SERVER").getIdentity("/AMBARI_INFRA/INFRA_SOLR/infra-solr"));
+    Assert.assertNotNull(rangerKerberosDescriptorUpdated.getService("RANGER"));
+    Assert.assertNotNull(rangerKerberosDescriptorUpdated.getService("RANGER").getComponent("RANGER_ADMIN"));
+    Assert.assertNotNull(rangerKerberosDescriptorUpdated.getService("RANGER").getComponent("RANGER_ADMIN").getIdentity("/AMBARI_INFRA/INFRA_SOLR/infra-solr"));
+    Assert.assertNotNull(stormKerberosDescriptorUpdated.getService("STORM"));
+    Assert.assertNotNull(stormKerberosDescriptorUpdated.getService("STORM").getComponent("NIMBUS"));
+    Assert.assertNotNull(stormKerberosDescriptorUpdated.getService("STORM").getComponent("NIMBUS").getIdentity("/STORM/storm_components"));
+  }
+
+  @Test
   public void testCreateRoleAuthorizations() throws AmbariException, SQLException {
 
     EasyMockSupport easyMockSupport = new EasyMockSupport();

http://git-wip-us.apache.org/repos/asf/ambari/blob/f7068818/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_5_infra_solr.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_5_infra_solr.json b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_5_infra_solr.json
new file mode 100644
index 0000000..172ad05
--- /dev/null
+++ b/ambari-server/src/test/resources/kerberos/test_kerberos_descriptor_2_5_infra_solr.json
@@ -0,0 +1,217 @@
+{
+  "properties": {
+    "realm": "${kerberos-env/realm}",
+    "keytab_dir": "/etc/security/keytabs"
+  },
+  "identities": [
+    {
+      "name": "spnego",
+      "principal": {
+        "value": "HTTP/_HOST@${realm}",
+        "type": "service"
+      },
+      "keytab": {
+        "file": "${keytab_dir}/spnego.service.keytab",
+        "owner": {
+          "name": "root",
+          "access": "r"
+        },
+        "group": {
+          "name": "${cluster-env/user_group}",
+          "access": "r"
+        }
+      }
+    }
+  ],
+  "services": [
+    {
+      "name": "ATLAS",
+      "identities": [
+        {
+          "name": "/spnego"
+        }
+      ],
+      "components": [
+        {
+          "name": "ATLAS_SERVER",
+          "identities": [
+            {
+              "name": "atlas",
+              "principal": {
+                "value": "atlas/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "application-properties/atlas.jaas.KafkaClient.option.principal",
+                "local_username" : "${atlas-env/metadata_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/atlas.service.keytab",
+                "owner": {
+                  "name": "${atlas-env/metadata_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "application-properties/atlas.jaas.KafkaClient.option.keyTab"
+              }
+            },
+            {
+              "name": "ranger_atlas_audit",
+              "reference": "/ATLAS/ATLAS_SERVER/atlas",
+              "principal": {
+                "configuration": "ranger-atlas-audit/xasecure.audit.jaas.Client.option.principal"
+              },
+              "keytab": {
+                "configuration": "ranger-atlas-audit/xasecure.audit.jaas.Client.option.keyTab"
+              }
+            }
+          ]
+        }
+      ]
+    },
+    {
+      "name": "LOGSEARCH",
+      "identities": [
+        {
+          "name": "/spnego"
+        }
+      ],
+      "components": [
+        {
+          "name": "LOGSEARCH_SERVER",
+          "identities": [
+            {
+              "name": "logsearch",
+              "principal": {
+                "value": "logsearch/_HOST@${realm}",
+                "type": "service",
+                "configuration": "logsearch-env/logsearch_kerberos_principal"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/logsearch.service.keytab",
+                "owner": {
+                  "name": "${logsearch-env/logsearch_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "logsearch-env/logsearch_kerberos_keytab"
+              }
+            },
+            {
+              "name": "/AMBARI_INFRA/INFRA_SOLR/infra-solr",
+              "when" : {
+                "contains" : ["services", "AMBARI_INFRA"]
+              }
+            }
+          ]
+        }
+      ]
+    },
+    {
+      "name": "RANGER",
+      "identities": [
+        {
+          "name": "/spnego"
+        }
+      ],
+      "components": [
+        {
+          "name": "RANGER_ADMIN",
+          "identities": [
+            {
+              "name": "rangeradmin",
+              "principal": {
+                "value": "rangeradmin/_HOST@${realm}",
+                "type" : "service",
+                "configuration": "ranger-admin-site/ranger.admin.kerberos.principal",
+                "local_username" : "${ranger-env/ranger_user}"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/rangeradmin.service.keytab",
+                "owner": {
+                  "name": "${ranger-env/ranger_user}",
+                  "access": "r"
+                },
+                "configuration": "ranger-admin-site/ranger.admin.kerberos.keytab"
+              }
+            },
+            {
+              "name": "/spnego",
+              "keytab": {
+                "configuration": "ranger-admin-site/ranger.spnego.kerberos.keytab"
+              }
+            }
+          ]
+        }
+      ]
+    },
+    {
+      "name": "STORM",
+      "identities": [
+        {
+          "name": "/spnego"
+        },
+        {
+          "name": "storm_components",
+          "principal": {
+            "value": "${storm-env/storm_user}${principal_suffix}@${realm}",
+            "type": "user",
+            "configuration": "storm-env/storm_principal_name"
+          },
+          "keytab": {
+            "file": "${keytab_dir}/storm.headless.keytab",
+            "owner": {
+              "name": "${storm-env/storm_user}",
+              "access": "r"
+            },
+            "group": {
+              "name": "${cluster-env/user_group}",
+              "access": ""
+            },
+            "configuration": "storm-env/storm_keytab"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "NIMBUS",
+          "identities": [
+            {
+              "name": "nimbus_server",
+              "principal": {
+                "value": "nimbus/_HOST@${realm}",
+                "type": "service",
+                "configuration": "storm-env/nimbus_principal_name"
+              },
+              "keytab": {
+                "file": "${keytab_dir}/nimbus.service.keytab",
+                "owner": {
+                  "name": "${storm-env/storm_user}",
+                  "access": "r"
+                },
+                "group": {
+                  "name": "${cluster-env/user_group}",
+                  "access": ""
+                },
+                "configuration": "storm-env/nimbus_keytab"
+              }
+            },
+            {
+              "name": "/STORM/NIMBUS/nimbus_server",
+              "principal": {
+                "configuration": "ranger-storm-audit/xasecure.audit.jaas.Client.option.principal"
+              },
+              "keytab": {
+                "configuration": "ranger-storm-audit/xasecure.audit.jaas.Client.option.keyTab"
+              }
+            }
+          ]
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file