Posted to commits@ambari.apache.org by mp...@apache.org on 2019/09/30 16:11:47 UTC

[ambari] branch branch-2.7 updated: AMBARI-25386. dfs_ha_initial_* properties should be removed during upgrade. (#3087)

This is an automated email from the ASF dual-hosted git repository.

mpapirkovskyy pushed a commit to branch branch-2.7
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/branch-2.7 by this push:
     new 7eb83d0  AMBARI-25386. dfs_ha_initial_* properties should be removed during upgrade. (#3087)
7eb83d0 is described below

commit 7eb83d0803d54cdb0e75964a6e08f028cad93417
Author: Myroslav Papirkovskyi <mp...@apache.org>
AuthorDate: Mon Sep 30 19:11:39 2019 +0300

    AMBARI-25386. dfs_ha_initial_* properties should be removed during upgrade. (#3087)
    
    * AMBARI-25386. dfs_ha_initial_* properties should be removed during upgrade. (mpapirkovskyy)
    
    * AMBARI-25386. dfs_ha_initial_* properties should be removed during upgrade. (mpapirkovskyy)
---
 .../ambari/server/upgrade/SchemaUpgradeHelper.java |   1 +
 .../ambari/server/upgrade/UpgradeCatalog275.java   | 120 +++++++++++++++++++++
 .../server/upgrade/UpgradeCatalog275Test.java      |  91 ++++++++++++++++
 3 files changed, 212 insertions(+)
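
For context, the blueprint's hadoop-env entry stores its properties as a JSON blob in the config data, and the new removeDfsHAInitial() step strips only the two HA bootstrap keys from it. A minimal before/after sketch, using the sample values from the unit test added below (any other hadoop-env properties in a real blueprint are left untouched):

    before: {"dfs_ha_initial_namenode_standby":"%HOSTGROUP::master_2%","dfs_ha_initial_namenode_active":"u1602.ambari.apache.org"}
    after:  {}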

diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index 4fa8030..29fbdc2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -193,6 +193,7 @@ public class SchemaUpgradeHelper {
       catalogBinder.addBinding().to(UpgradeCatalog272.class);
       catalogBinder.addBinding().to(UpgradeCatalog273.class);
       catalogBinder.addBinding().to(UpgradeCatalog274.class);
+      catalogBinder.addBinding().to(UpgradeCatalog275.class);
       catalogBinder.addBinding().to(UpdateAlertScriptPaths.class);
       catalogBinder.addBinding().to(FinalUpgradeCatalog.class);
 
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog275.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog275.java
new file mode 100644
index 0000000..d221c0e
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog275.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.upgrade;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.orm.dao.BlueprintDAO;
+import org.apache.ambari.server.orm.entities.BlueprintConfigEntity;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+
+/**
+ * The {@link UpgradeCatalog275} upgrades Ambari from 2.7.4 to 2.7.5.
+ */
+public class UpgradeCatalog275 extends AbstractUpgradeCatalog {
+
+  static final Gson GSON = new Gson();
+
+
+  @Inject
+  public UpgradeCatalog275(Injector injector) {
+    super(injector);
+  }
+
+  @Override
+  public String getSourceVersion() {
+    return "2.7.4";
+  }
+
+  @Override
+  public String getTargetVersion() {
+    return "2.7.5";
+  }
+
+  /**
+   * Performs database schema transformations. Can run only before the persistence service starts.
+   *
+   * @throws AmbariException
+   * @throws SQLException
+   */
+  @Override
+  protected void executeDDLUpdates() throws AmbariException, SQLException {
+
+  }
+
+  /**
+   * Performs data insertion before the normal data upgrade; requires a started persistence service.
+   *
+   * @throws AmbariException
+   * @throws SQLException
+   */
+  @Override
+  protected void executePreDMLUpdates() throws AmbariException, SQLException {
+    removeDfsHAInitial();
+  }
+
+  /**
+   * Performs the normal data upgrade.
+   *
+   * @throws AmbariException
+   * @throws SQLException
+   */
+  @Override
+  protected void executeDMLUpdates() throws AmbariException, SQLException {
+    addNewConfigurationsFromXml();
+  }
+
+  protected void removeDfsHAInitial() {
+    BlueprintDAO blueprintDAO = injector.getInstance(BlueprintDAO.class);
+    List<BlueprintEntity> blueprintEntityList = blueprintDAO.findAll();
+    List<BlueprintEntity> changedBlueprints = new ArrayList<>();
+    for (BlueprintEntity blueprintEntity : blueprintEntityList){
+      boolean changed = false;
+      Collection<BlueprintConfigEntity> blueprintConfigurations = blueprintEntity.getConfigurations();
+      for (BlueprintConfigEntity blueprintConfigEntity : blueprintConfigurations) {
+        if (blueprintConfigEntity.getType().equals("hadoop-env")) {
+          String configData = blueprintConfigEntity.getConfigData();
+
+          Map<String, String> typeProperties = GSON.<Map<String, String>>fromJson(
+            configData, Map.class);
+
+          typeProperties.remove("dfs_ha_initial_namenode_standby");
+          typeProperties.remove("dfs_ha_initial_namenode_active");
+
+          blueprintConfigEntity.setConfigData(GSON.toJson(typeProperties));
+          changed = true;
+        }
+      }
+      if (changed) {
+        changedBlueprints.add(blueprintEntity);
+      }
+    }
+    for (BlueprintEntity blueprintEntity : changedBlueprints) {
+      blueprintDAO.merge(blueprintEntity);
+    }
+  }
+}
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog275Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog275Test.java
new file mode 100644
index 0000000..e606c40
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog275Test.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.upgrade;
+
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.server.orm.dao.BlueprintDAO;
+import org.apache.ambari.server.orm.entities.BlueprintConfigEntity;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+import org.easymock.Capture;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.google.inject.Injector;
+
+public class UpgradeCatalog275Test {
+
+  @Test
+  public void testRemoveDfsHAInitial() {
+    Injector injector = createNiceMock(Injector.class);
+    BlueprintDAO blueprintDAO = createMock(BlueprintDAO.class);
+
+    BlueprintConfigEntity blueprintConfigEntity = new BlueprintConfigEntity();
+    blueprintConfigEntity.setType("hadoop-env");
+    blueprintConfigEntity.setConfigData("{\"dfs_ha_initial_namenode_standby\":\"%HOSTGROUP::master_2%\"," +
+                                          "\"dfs_ha_initial_namenode_active\":\"u1602.ambari.apache.org\"}");
+
+    List<BlueprintConfigEntity> blueprintConfigurations = Collections.singletonList(blueprintConfigEntity);
+
+    BlueprintEntity blueprintEntity = new BlueprintEntity();
+    blueprintEntity.setConfigurations(blueprintConfigurations);
+
+    List<BlueprintEntity> blueprintEntityList = Collections.singletonList(blueprintEntity);
+
+    expect(injector.getInstance(BlueprintDAO.class)).andReturn(blueprintDAO);
+    expect(blueprintDAO.findAll()).andReturn(blueprintEntityList);
+
+    Capture<BlueprintEntity> blueprintEntityCapture = Capture.newInstance();
+    expect(blueprintDAO.merge(capture(blueprintEntityCapture))).andReturn(null);
+
+    replay(injector, blueprintDAO);
+
+    UpgradeCatalog275 upgradeCatalog275 = new UpgradeCatalog275(injector);
+    upgradeCatalog275.removeDfsHAInitial();
+
+    verify(injector, blueprintDAO);
+
+    Assert.assertNotNull(blueprintEntityCapture.getValues());
+    Assert.assertEquals(1, blueprintEntityCapture.getValues().size());
+
+    BlueprintEntity blueprintEntityToMerge = blueprintEntityCapture.getValue();
+
+    Collection<BlueprintConfigEntity> resultConfigurations = blueprintEntityToMerge.getConfigurations();
+    for (BlueprintConfigEntity resultConfiguration : resultConfigurations) {
+      if (resultConfiguration.getType().equals("hadoop-env")) {
+        String configData = resultConfiguration.getConfigData();
+
+        Map<String, String> typeProperties = UpgradeCatalog275.GSON.<Map<String, String>>fromJson(
+          configData, Map.class);
+        Assert.assertEquals(0, typeProperties.size());
+        return;
+      }
+    }
+    Assert.fail("No \"hadoop-env\" config type was found in result configuration");
+  }
+}
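
The transformation itself is a plain Gson round-trip on the stored config string. A minimal standalone sketch of that step, not part of the patch, reusing the sample config data from the test above (assumes the Gson library is on the classpath):

    import java.util.Map;

    import com.google.gson.Gson;

    // Standalone illustration of the round-trip removeDfsHAInitial() performs:
    // parse the blueprint's hadoop-env config data, drop the two HA bootstrap
    // keys, and serialize the remaining properties back to a JSON string.
    public class DfsHaInitialRemovalSketch {

      @SuppressWarnings("unchecked")
      public static void main(String[] args) {
        Gson gson = new Gson();

        // Sample config data copied from UpgradeCatalog275Test above.
        String configData = "{\"dfs_ha_initial_namenode_standby\":\"%HOSTGROUP::master_2%\","
            + "\"dfs_ha_initial_namenode_active\":\"u1602.ambari.apache.org\"}";

        Map<String, String> typeProperties = gson.fromJson(configData, Map.class);
        typeProperties.remove("dfs_ha_initial_namenode_standby");
        typeProperties.remove("dfs_ha_initial_namenode_active");

        // Prints "{}" here; any other hadoop-env properties would be preserved.
        System.out.println(gson.toJson(typeProperties));
      }
    }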