Posted to commits@ambari.apache.org by mp...@apache.org on 2018/06/15 17:18:38 UTC

[ambari] branch trunk updated: AMBARI-24112. Requests failed after Ambari upgrade with exception while executing custom service command. (#1547)

This is an automated email from the ASF dual-hosted git repository.

mpapirkovskyy pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/trunk by this push:
     new acffdfc  AMBARI-24112. Requests failed after Ambari upgrade with exception while executing custom service command. (#1547)
acffdfc is described below

commit acffdfcadf7c6da900f1adc5b1d059010904ebe0
Author: Myroslav Papirkovskyi <mp...@apache.org>
AuthorDate: Fri Jun 15 20:18:24 2018 +0300

    AMBARI-24112. Requests failed after Ambari upgrade with exception while executing custom service command. (#1547)
    
    * AMBARI-24112. Requests failed after Ambari upgrade with exception while executing custom service command. (mpapirkovskyy)
    
    * AMBARI-24112. Requests failed after Ambari upgrade with exception while executing custom service command. (mpapirkovskyy)
---
 .../server/agent/stomp/AgentClusterDataHolder.java |  7 +----
 .../server/agent/stomp/AgentHostDataHolder.java    |  8 +-----
 .../controller/AmbariManagementControllerImpl.java |  2 +-
 .../ambari/server/state/cluster/ClustersImpl.java  | 32 +++++++++++++---------
 4 files changed, 22 insertions(+), 27 deletions(-)

diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/AgentClusterDataHolder.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/AgentClusterDataHolder.java
index afb4273..2c7b26e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/AgentClusterDataHolder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/AgentClusterDataHolder.java
@@ -60,17 +60,12 @@ public abstract class AgentClusterDataHolder<T extends STOMPEvent & Hashable> ex
    * @return true if the update introduced any change
    */
   public boolean updateData(T update) throws AmbariException {
-    initializeDataIfNeeded(false);
+    initializeDataIfNeeded(true);
     boolean changed = handleUpdate(update);
     if (changed) {
       regenerateDataIdentifiers(data);
       update.setHash(getData().getHash());
       STOMPUpdatePublisher.publish(update);
-    } else {
-      // in case update does not have changes empty identifiers should be populated anyway
-      if (!isIdentifierValid(data)) {
-        regenerateDataIdentifiers(data);
-      }
     }
     return changed;
   }
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/AgentHostDataHolder.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/AgentHostDataHolder.java
index 06fcc17..1e2f56a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/AgentHostDataHolder.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/stomp/AgentHostDataHolder.java
@@ -69,7 +69,7 @@ public abstract class AgentHostDataHolder<T extends STOMPHostEvent & Hashable> e
    * event to listeners.
    */
   public final void updateData(T update) throws AmbariException {
-    initializeDataIfNeeded(update.getHostId(), false);
+    initializeDataIfNeeded(update.getHostId(), true);
     if (handleUpdate(update)) {
       T hostData = getData(update.getHostId());
       regenerateDataIdentifiers(hostData);
@@ -78,12 +78,6 @@ public abstract class AgentHostDataHolder<T extends STOMPHostEvent & Hashable> e
         LOG.info("Configs update with hash {} will be sent to host {}", update.getHash(), hostData.getHostId());
       }
       STOMPUpdatePublisher.publish(update);
-    } else {
-      // in case update does not have changes empty identifiers should be populated anyway
-      T hostData = getData(update.getHostId());
-      if (!isIdentifierValid(hostData)) {
-        regenerateDataIdentifiers(hostData);
-      }
     }
   }
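
The hunks above make the same change for cluster-level and per-host data: initializeDataIfNeeded is now called with true, and the fallback branch that backfilled empty identifiers on the no-change path is removed. Assuming the boolean flag tells the holder to regenerate identifiers while the data is first being initialized (the flag's meaning is not spelled out in the diff), a minimal self-contained sketch of the resulting flow could look like the following; the class and helper names (SampleDataHolder, SampleData, computeHash) are illustrative, not Ambari's.

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    class SampleDataHolder {

      static final class SampleData {
        volatile String hash;          // identifier, regenerated whenever content changes
        volatile String payload = "";
      }

      private final ConcurrentMap<Long, SampleData> data = new ConcurrentHashMap<>();

      // Initialization also assigns the identifier, so no caller can observe an entry
      // whose hash is still empty -- the role the removed else branch used to play.
      private SampleData initializeDataIfNeeded(Long id) {
        return data.computeIfAbsent(id, k -> {
          SampleData d = new SampleData();
          d.hash = computeHash(d);
          return d;
        });
      }

      // Mirrors the simplified updateData: regenerate the identifier only when the
      // update actually changed something.
      public boolean updateData(Long id, String newPayload) {
        SampleData d = initializeDataIfNeeded(id);
        boolean changed = !d.payload.equals(newPayload);
        if (changed) {
          d.payload = newPayload;
          d.hash = computeHash(d);
        }
        return changed;
      }

      private String computeHash(SampleData d) {
        return Integer.toHexString(d.payload.hashCode());
      }
    }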
 
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 60c20d4..2f4ae73 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -5830,7 +5830,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     return serviceLevelParams;
   }
 
-  public TreeMap<String, String> getMetadataAmbariLevelParams() throws AmbariException {
+  public TreeMap<String, String> getMetadataAmbariLevelParams() {
     TreeMap<String, String> clusterLevelParams = new TreeMap<>();
     clusterLevelParams.put(JDK_LOCATION, getJdkResourceUrl());
     clusterLevelParams.put(JAVA_HOME, getJavaHome());
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
index a9fbe06..1100e09 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClustersImpl.java
@@ -280,37 +280,43 @@ public class ClustersImpl implements Clusters {
   private void loadClustersAndHosts() {
     LOG.info("Initializing cluster and host data.");
 
-    clustersByName = new ConcurrentHashMap<>();
-    clustersById = new ConcurrentHashMap<>();
-    hostsByName = new ConcurrentHashMap<>();
-    hostsById = new ConcurrentHashMap<>();
-    hostClustersMap = new ConcurrentHashMap<>();
-    clusterHostsMap1 = new ConcurrentHashMap<>();
+    ConcurrentHashMap<String, Cluster> clustersByNameTemp = new ConcurrentHashMap<>();
+    ConcurrentHashMap<Long, Cluster> clustersByIdTemp = new ConcurrentHashMap<>();
+    ConcurrentHashMap<String, Host> hostsByNameTemp = new ConcurrentHashMap<>();
+    ConcurrentHashMap<Long, Host> hostsByIdTemp = new ConcurrentHashMap<>();
+    ConcurrentHashMap<String, Set<Cluster>> hostClustersMapTemp = new ConcurrentHashMap<>();
+    ConcurrentHashMap<String, Set<Host>> clusterHostsMap1Temp = new ConcurrentHashMap<>();
 
     List<HostEntity> hostEntities = hostDAO.findAll();
     for (HostEntity hostEntity : hostEntities) {
       Host host = hostFactory.create(hostEntity);
-      hostsByName.put(hostEntity.getHostName(), host);
-      hostsById.put(hostEntity.getHostId(), host);
+      hostsByNameTemp.put(hostEntity.getHostName(), host);
+      hostsByIdTemp.put(hostEntity.getHostId(), host);
     }
+    hostsByName = hostsByNameTemp;
+    hostsById = hostsByIdTemp;
 
     for (ClusterEntity clusterEntity : clusterDAO.findAll()) {
       Cluster currentCluster = clusterFactory.create(clusterEntity);
-      clustersByName.put(clusterEntity.getClusterName(), currentCluster);
-      clustersById.put(currentCluster.getClusterId(), currentCluster);
-      clusterHostsMap1.put(currentCluster.getClusterName(), Collections.newSetFromMap(new ConcurrentHashMap<>()));
+      clustersByNameTemp.put(clusterEntity.getClusterName(), currentCluster);
+      clustersByIdTemp.put(currentCluster.getClusterId(), currentCluster);
+      clusterHostsMap1Temp.put(currentCluster.getClusterName(), Collections.newSetFromMap(new ConcurrentHashMap<>()));
     }
+    clustersByName = clustersByNameTemp;
+    clustersById = clustersByIdTemp;
 
     for (HostEntity hostEntity : hostEntities) {
       Set<Cluster> cSet = Collections.newSetFromMap(new ConcurrentHashMap<Cluster, Boolean>());
-      hostClustersMap.put(hostEntity.getHostName(), cSet);
+      hostClustersMapTemp.put(hostEntity.getHostName(), cSet);
 
       Host host = getHostsByName().get(hostEntity.getHostName());
       for (ClusterEntity clusterEntity : hostEntity.getClusterEntities()) {
-        clusterHostsMap1.get(clusterEntity.getClusterName()).add(host);
+        clusterHostsMap1Temp.get(clusterEntity.getClusterName()).add(host);
         cSet.add(clustersByName.get(clusterEntity.getClusterName()));
       }
     }
+    hostClustersMap = hostClustersMapTemp;
+    clusterHostsMap1 = clusterHostsMap1Temp;
     // init host configs
     for (Long hostId : hostsById.keySet()) {
       try {

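The ClustersImpl change above stops populating the shared lookup maps in place; instead, temporary maps are filled completely and only then assigned to the fields, so a request arriving while loadClustersAndHosts is still running sees either the previous state or the fully loaded new one. A minimal, self-contained sketch of that publish-after-populate pattern is below; the field is volatile here purely for illustration, since the diff does not show how ClustersImpl declares or guards these fields, and the class and method names are hypothetical.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class SampleRegistry {

      // Readers see either the previous map or the fully loaded replacement,
      // never a map that is still being filled.
      private volatile Map<String, String> byName = new ConcurrentHashMap<>();

      void reload(Map<String, String> source) {
        Map<String, String> temp = new ConcurrentHashMap<>();
        temp.putAll(source);   // populate completely before exposing
        byName = temp;         // single publication point
      }

      String lookup(String name) {
        return byName.get(name);
      }
    }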