Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/24 16:08:20 UTC

[33/50] [abbrv] ambari git commit: AMBARI-15094 - Patch Upgrade UpgradeCatalog Changes (jonathanhurley)

AMBARI-15094 - Patch Upgrade UpgradeCatalog Changes (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4f78af7a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4f78af7a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4f78af7a

Branch: refs/heads/trunk
Commit: 4f78af7ae47564fdc6a2c013cf1757f19005e6a5
Parents: 9873e69
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Feb 18 13:23:43 2016 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Feb 19 11:11:30 2016 -0500

----------------------------------------------------------------------
 .../apache/ambari/server/orm/DBAccessor.java    |  16 +-
 .../ambari/server/orm/DBAccessorImpl.java       |  86 ++++++-
 .../server/orm/helpers/dbms/DbmsHelper.java     |  12 +-
 .../orm/helpers/dbms/GenericDbmsHelper.java     |  10 +
 .../server/upgrade/UpgradeCatalog240.java       | 225 ++++++++++++++++---
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   1 +
 .../server/upgrade/UpgradeCatalog240Test.java   | 119 +++++++---
 7 files changed, 397 insertions(+), 72 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java
index 49f108b..8142661 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java
@@ -545,7 +545,9 @@ public interface DBAccessor {
   /**
    * Queries the database to determine the name of the primary key constraint on
    * the specified table. Currently, this is only implemented for
-   * {@link DatabaseType#ORACLE} and {@link DatabaseType#SQL_SERVER}.
+   * {@link DatabaseType#POSTGRES}, {@link DatabaseType#ORACLE} and
+   * {@link DatabaseType#SQL_SERVER}. {@link DatabaseType#MYSQL} does not need
+   * this since PKs can be dropped without referencing their name.
    *
    * @param tableName
    *          the name of the table to lookup the PK constraint.
@@ -554,6 +556,18 @@ public interface DBAccessor {
    */
   String getPrimaryKeyConstraintName(String tableName) throws SQLException;
 
+  /**
+   * Attempts to drop the discovered PRIMARY KEY constraint on the specified
+   * table, falling back to the specified default constraint name if none is found.
+   *
+   * @param tableName
+   *          the table to drop the PK from (not {@code null}).
+   * @param defaultConstraintName
+   *          the default name of the PK constraint if none is found.
+   * @throws SQLException
+   */
+  void dropPKConstraint(String tableName, String defaultConstraintName) throws SQLException;
+
   enum DbType {
     ORACLE,
     MYSQL,

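For orientation, here is a minimal sketch of how an upgrade catalog might call the new interface method; the table name and fallback constraint name are hypothetical, not taken from this commit:

  // Hypothetical upgrade step: drops whatever PK "example_table" currently has.
  // On Oracle, SQL Server and Postgres the constraint name is discovered first;
  // on MySQL the PK is dropped directly without needing a name.
  protected void dropExamplePrimaryKey() throws SQLException {
    dbAccessor.dropPKConstraint("example_table", "example_table_pkey");
  }
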
http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
index abd05bc..329fea8 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessorImpl.java
@@ -31,12 +31,12 @@ import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.sql.Types;
-import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.configuration.Configuration.DatabaseType;
 import org.apache.ambari.server.orm.helpers.ScriptRunner;
 import org.apache.ambari.server.orm.helpers.dbms.DbmsHelper;
 import org.apache.ambari.server.orm.helpers.dbms.DerbyHelper;
@@ -45,6 +45,7 @@ import org.apache.ambari.server.orm.helpers.dbms.MySqlHelper;
 import org.apache.ambari.server.orm.helpers.dbms.OracleHelper;
 import org.apache.ambari.server.orm.helpers.dbms.PostgresHelper;
 import org.apache.ambari.server.utils.CustomStringUtils;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
 import org.eclipse.persistence.internal.helper.DBPlatformHelper;
 import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl;
@@ -146,12 +147,19 @@ public class DBAccessorImpl implements DBAccessor {
 
   @Override
   public void createTable(String tableName, List<DBColumnInfo> columnInfo,
-          String... primaryKeyColumns) throws SQLException {
-    if (!tableExists(tableName)) {
-      String query = dbmsHelper.getCreateTableStatement(tableName, columnInfo, Arrays.asList(primaryKeyColumns));
-
-      executeQuery(query);
+      String... primaryKeyColumns) throws SQLException {
+    // do nothing if the table already exists
+    if (tableExists(tableName)) {
+      return;
     }
+
+    // guard against null PKs
+    primaryKeyColumns = ArrayUtils.nullToEmpty(primaryKeyColumns);
+
+    String query = dbmsHelper.getCreateTableStatement(tableName, columnInfo,
+        Arrays.asList(primaryKeyColumns));
+
+    executeQuery(query);
   }
 
   protected DatabaseMetaData getDatabaseMetaData() throws SQLException {
@@ -772,6 +780,14 @@ public class DBAccessorImpl implements DBAccessor {
     if (checkedConstraintName != null) {
       String query = dbmsHelper.getDropFKConstraintStatement(tableName, checkedConstraintName);
       executeQuery(query, ignoreFailure);
+
+      // MySQL also adds indexes in addition to the FK which should be dropped
+      Configuration.DatabaseType databaseType = configuration.getDatabaseType();
+      if (databaseType == DatabaseType.MYSQL) {
+        query = dbmsHelper.getDropIndexStatement(constraintName, tableName);
+        executeQuery(query, true);
+      }
+
     } else {
       LOG.warn("Constraint {} from {} table not found, nothing to drop", constraintName, tableName);
     }
@@ -1022,9 +1038,9 @@ public class DBAccessorImpl implements DBAccessor {
 
     switch (databaseType) {
       case ORACLE: {
-        String lookupPrimaryKeyNameSql = MessageFormat.format(
-            "SELECT constraint_name FROM all_constraints WHERE table_name = ''{0}'' AND constraint_type = ''P''",
-            tableName.toUpperCase());
+        String lookupPrimaryKeyNameSql = String.format(
+            "SELECT constraint_name FROM all_constraints WHERE UPPER(table_name) = UPPER('%s') AND constraint_type = 'P'",
+            tableName);
 
         try {
           statement = getConnection().createStatement();
@@ -1040,8 +1056,8 @@ public class DBAccessorImpl implements DBAccessor {
         break;
       }
       case SQL_SERVER: {
-        String lookupPrimaryKeyNameSql = MessageFormat.format(
-            "SELECT constraint_name FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE WHERE OBJECTPROPERTY(OBJECT_ID(constraint_name), 'IsPrimaryKey') = 1 AND table_name = {0}",
+        String lookupPrimaryKeyNameSql = String.format(
+            "SELECT constraint_name FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE WHERE OBJECTPROPERTY(OBJECT_ID(constraint_name), 'IsPrimaryKey') = 1 AND table_name = '%s'",
             tableName);
 
         try {
@@ -1055,6 +1071,25 @@ public class DBAccessorImpl implements DBAccessor {
           JdbcUtils.closeStatement(statement);
         }
 
+        break;
+      }
+      case POSTGRES: {
+        String lookupPrimaryKeyNameSql = String.format(
+            "SELECT constraint_name FROM information_schema.table_constraints AS tc WHERE tc.constraint_type = 'PRIMARY KEY' AND table_name = '%s'",
+            tableName);
+
+        try {
+          statement = getConnection().createStatement();
+          resultSet = statement.executeQuery(lookupPrimaryKeyNameSql);
+          if (resultSet.next()) {
+            primaryKeyConstraintName = resultSet.getString("constraint_name");
+          }
+        } finally {
+          JdbcUtils.closeResultSet(resultSet);
+          JdbcUtils.closeStatement(statement);
+        }
+
+        break;
       }
       default:
         break;
@@ -1063,4 +1098,33 @@ public class DBAccessorImpl implements DBAccessor {
     return primaryKeyConstraintName;
   }
 
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void dropPKConstraint(String tableName, String defaultConstraintName) throws SQLException {
+    Configuration.DatabaseType databaseType = configuration.getDatabaseType();
+
+    // drop the PK directly if MySQL since it supports it
+    if (databaseType == DatabaseType.MYSQL) {
+      String mysqlDropQuery = String.format("ALTER TABLE %s DROP PRIMARY KEY", tableName);
+      executeQuery(mysqlDropQuery, true);
+      return;
+    }
+
+    // discover the PK name, using the default if none found
+    String primaryKeyConstraintName = getPrimaryKeyConstraintName(tableName);
+    if (null == primaryKeyConstraintName) {
+      primaryKeyConstraintName = defaultConstraintName;
+      LOG.warn("Unable to dynamically determine the PK constraint name for {}, defaulting to {}",
+          tableName, defaultConstraintName);
+    }
+
+    // warn if we can't find it
+    if (null == primaryKeyConstraintName) {
+      LOG.warn("Unable to determine the primary key constraint name for {}", tableName);
+    } else {
+      dropPKConstraint(tableName, primaryKeyConstraintName, true);
+    }
+  }
 }

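One consequence of the null guard added to createTable above is that a caller may now pass (String[]) null for the primary key columns and attach the constraint afterwards, which is exactly how the history table is created later in this commit. A hedged sketch, using hypothetical table and constraint names:

  // Create a table with no inline PK (a null varargs array is normalized to
  // empty), then add the primary key as a separately named constraint.
  List<DBColumnInfo> columns = new ArrayList<>();
  columns.add(new DBColumnInfo("id", Long.class, null, null, false));
  dbAccessor.createTable("example_history", columns, (String[]) null);
  dbAccessor.addPKConstraint("example_history", "PK_example_history", "id");
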
http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java
index cdc1b4a..30c06fb 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/DbmsHelper.java
@@ -59,6 +59,16 @@ public interface DbmsHelper {
                                  String... columnNames);
 
   /**
+   * Gets the DROP INDEX statement.
+   *
+   * @param indexName the name of the index to drop.
+   * @param tableName the name of the table that owns the index.
+   *
+   * @return the DROP INDEX statement.
+   */
+  String getDropIndexStatement(String indexName, String tableName);
+
+  /**
    * Generate alter table statement to add unique constraint
    * @param tableName name of the table
    * @param constraintName name of the constraint
@@ -102,7 +112,7 @@ public interface DbmsHelper {
 
   /**
    * Gets the {@code SET NULL} or {@code SET NOT NULL} statement.
-   * 
+   *
    * @param tableName
    *          the table (not {@code null}).
    * @param columnInfo

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
index 8dfb8ba..21fa361 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/helpers/dbms/GenericDbmsHelper.java
@@ -243,6 +243,16 @@ public class GenericDbmsHelper implements DbmsHelper {
     return createIndex;
   }
 
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public String getDropIndexStatement(String indexName, String tableName) {
+    String dropIndex = databasePlatform.buildDropIndex(tableName, indexName);
+    return dropIndex;
+  }
+
   @Override
   public String getAddUniqueConstraintStatement(String tableName, String constraintName, String... columnNames){
     UniqueKeyConstraint uniqueKeyConstraint = new UniqueKeyConstraint();

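The new helper pairs with the dropFKConstraint change above: on MySQL, dropping a foreign key leaves behind an index of the same name, which is then removed with the generated statement. A rough sketch of the sequence DBAccessorImpl follows (names are illustrative, and the generated SQL depends on the EclipseLink DatabasePlatform behind buildDropIndex):

  // Inside DBAccessorImpl, after the FK itself has been dropped on MySQL:
  String dropIndexSql = dbmsHelper.getDropIndexStatement("fk_example_name", "example_table");
  executeQuery(dropIndexSql, true); // ignore failure in case no such index exists
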
http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index d97962f..09f31e4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -18,17 +18,21 @@
 
 package org.apache.ambari.server.upgrade;
 
-import com.google.common.collect.Lists;
-import com.google.gson.JsonArray;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
-import com.google.gson.JsonPrimitive;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import java.sql.Clob;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicLong;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.AmbariManagementController;
-import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
 import org.apache.ambari.server.orm.dao.PermissionDAO;
@@ -37,19 +41,20 @@ import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.RepositoryType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.support.JdbcUtils;
 
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
+import com.google.common.collect.Lists;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.gson.JsonPrimitive;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.persist.Transactional;
 
 /**
  * Upgrade catalog for version 2.4.0.
@@ -59,6 +64,13 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   protected static final String ADMIN_PERMISSION_TABLE = "adminpermission";
   protected static final String PERMISSION_ID_COL = "permission_name";
   protected static final String SORT_ORDER_COL = "sort_order";
+  protected static final String REPO_VERSION_TABLE = "repo_version";
+  protected static final String SERVICE_COMPONENT_DS_TABLE = "servicecomponentdesiredstate";
+  protected static final String HOST_COMPONENT_DS_TABLE = "hostcomponentdesiredstate";
+  protected static final String HOST_COMPONENT_STATE_TABLE = "hostcomponentstate";
+  protected static final String SERVICE_COMPONENT_HISTORY_TABLE = "servicecomponent_history";
+  protected static final String UPGRADE_TABLE = "upgrade";
+  protected static final String STACK_TABLE = "stack";
 
   @Inject
   DaoUtils daoUtils;
@@ -116,6 +128,9 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   protected void executeDDLUpdates() throws AmbariException, SQLException {
     updateAdminPermissionTable();
     createSettingTable();
+    updateRepoVersionTableDDL();
+    updateServiceComponentDesiredStateTableDDL();
+    createServiceComponentHistoryTable();
   }
 
   @Override
@@ -132,17 +147,17 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   }
 
   private void createSettingTable() throws SQLException {
-    List<DBAccessor.DBColumnInfo> columns = new ArrayList<>();
+    List<DBColumnInfo> columns = new ArrayList<>();
 
     //  Add setting table
     LOG.info("Creating " + SETTING_TABLE + " table");
 
-    columns.add(new DBAccessor.DBColumnInfo(ID, Long.class, null, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("name", String.class, 255, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("setting_type", String.class, 255, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("content", String.class, 3000, null, false));
-    columns.add(new DBAccessor.DBColumnInfo("updated_by", String.class, 255, "_db", false));
-    columns.add(new DBAccessor.DBColumnInfo("update_timestamp", Long.class, null, null, false));
+    columns.add(new DBColumnInfo(ID, Long.class, null, null, false));
+    columns.add(new DBColumnInfo("name", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("setting_type", String.class, 255, null, false));
+    columns.add(new DBColumnInfo("content", String.class, 3000, null, false));
+    columns.add(new DBColumnInfo("updated_by", String.class, 255, "_db", false));
+    columns.add(new DBColumnInfo("update_timestamp", Long.class, null, null, false));
     dbAccessor.createTable(SETTING_TABLE, columns, ID);
     addSequence("setting_id_seq", 0L, false);
   }
@@ -368,7 +383,8 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
   protected void updateAdminPermissionTable() throws SQLException {
     // Add the sort_order column to the adminpermission table
-    dbAccessor.addColumn(ADMIN_PERMISSION_TABLE, new DBAccessor.DBColumnInfo(SORT_ORDER_COL, Short.class, null, 1, false));
+    dbAccessor.addColumn(ADMIN_PERMISSION_TABLE,
+        new DBColumnInfo(SORT_ORDER_COL, Short.class, null, 1, false));
   }
 
   protected void setRoleSortOrder() throws SQLException {
@@ -391,4 +407,159 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
         7, PermissionEntity.VIEW_USER_PERMISSION_NAME));
   }
 
+  /**
+   * Makes the following changes to the {@value #REPO_VERSION_TABLE} table:
+   * <ul>
+   * <li>repo_type VARCHAR(255) DEFAULT 'STANDARD' NOT NULL</li>
+   * <li>version_url VARCHAR(1024)</li>
+   * <li>version_xml MEDIUMTEXT</li>
+   * <li>version_xsd VARCHAR(512)</li>
+   * <li>parent_id BIGINT</li>
+   * </ul>
+   *
+   * @throws SQLException
+   */
+  private void updateRepoVersionTableDDL() throws SQLException {
+    DBColumnInfo repoTypeColumn = new DBColumnInfo("repo_type", String.class, 255, RepositoryType.STANDARD.name(), false);
+    DBColumnInfo versionUrlColumn = new DBColumnInfo("version_url", String.class, 1024, null, true);
+    DBColumnInfo versionXmlColumn = new DBColumnInfo("version_xml", Clob.class, null, null, true);
+    DBColumnInfo versionXsdColumn = new DBColumnInfo("version_xsd", String.class, 512, null, true);
+    DBColumnInfo parentIdColumn = new DBColumnInfo("parent_id", Long.class, null, null, true);
+
+    dbAccessor.addColumn(REPO_VERSION_TABLE, repoTypeColumn);
+    dbAccessor.addColumn(REPO_VERSION_TABLE, versionUrlColumn);
+    dbAccessor.addColumn(REPO_VERSION_TABLE, versionXmlColumn);
+    dbAccessor.addColumn(REPO_VERSION_TABLE, versionXsdColumn);
+    dbAccessor.addColumn(REPO_VERSION_TABLE, parentIdColumn);
+  }
+
+  /**
+   * Makes the following changes to the {@value #SERVICE_COMPONENT_DS_TABLE} table,
+   * but only if the table doesn't already have its new PK set.
+   * <ul>
+   * <li>id BIGINT NOT NULL</li>
+   * <li>Drops FKs on {@value #HOST_COMPONENT_DS_TABLE} and {@value #HOST_COMPONENT_STATE_TABLE}</li>
+   * <li>Populates the new {@code id} column in {@value #SERVICE_COMPONENT_DS_TABLE}</li>
+   * <li>Creates a {@code UNIQUE} constraint on {@value #SERVICE_COMPONENT_DS_TABLE}</li>
+   * <li>Adds FKs on {@value #HOST_COMPONENT_DS_TABLE} and {@value #HOST_COMPONENT_STATE_TABLE}</li>
+   * <li>Adds new sequence value of {@code servicecomponentdesiredstate_id_seq}</li>
+   * </ul>
+   *
+   * @throws SQLException
+   */
+  @Transactional
+  private void updateServiceComponentDesiredStateTableDDL() throws SQLException {
+    if (dbAccessor.tableHasPrimaryKey(SERVICE_COMPONENT_DS_TABLE, ID)) {
+      LOG.info("Skipping {} table Primary Key modifications since the new {} column already exists",
+          SERVICE_COMPONENT_DS_TABLE, ID);
+
+      return;
+    }
+
+    // drop FKs to SCDS in both HCDS and HCS tables
+    dbAccessor.dropFKConstraint(HOST_COMPONENT_DS_TABLE, "hstcmpnntdesiredstatecmpnntnme");
+    dbAccessor.dropFKConstraint(HOST_COMPONENT_STATE_TABLE, "hstcomponentstatecomponentname");
+
+    // remove existing compound PK
+    dbAccessor.dropPKConstraint(SERVICE_COMPONENT_DS_TABLE, "servicecomponentdesiredstate_pkey");
+
+    // add new PK column to SCDS, making it nullable for now
+    DBColumnInfo idColumn = new DBColumnInfo(ID, Long.class, null, null, true);
+    dbAccessor.addColumn(SERVICE_COMPONENT_DS_TABLE, idColumn);
+
+    // populate SCDS id column
+    AtomicLong scdsIdCounter = new AtomicLong(1);
+    Statement statement = null;
+    ResultSet resultSet = null;
+    try {
+      statement = dbAccessor.getConnection().createStatement();
+      if (statement != null) {
+        String selectSQL = String.format("SELECT cluster_id, service_name, component_name FROM %s",
+            SERVICE_COMPONENT_DS_TABLE);
+
+        resultSet = statement.executeQuery(selectSQL);
+        while (null != resultSet && resultSet.next()) {
+          final Long clusterId = resultSet.getLong("cluster_id");
+          final String serviceName = resultSet.getString("service_name");
+          final String componentName = resultSet.getString("component_name");
+
+          String updateSQL = String.format(
+              "UPDATE %s SET %s = %d WHERE cluster_id = %d AND service_name = '%s' AND component_name = '%s'",
+              SERVICE_COMPONENT_DS_TABLE, ID, scdsIdCounter.getAndIncrement(), clusterId,
+              serviceName, componentName);
+
+          dbAccessor.executeQuery(updateSQL);
+        }
+      }
+    } finally {
+      JdbcUtils.closeResultSet(resultSet);
+      JdbcUtils.closeStatement(statement);
+    }
+
+    // make the column NON NULL now
+    dbAccessor.alterColumn(SERVICE_COMPONENT_DS_TABLE,
+        new DBColumnInfo(ID, Long.class, null, null, false));
+
+    // create a new PK, matching the name of the constraint found in SQL
+    dbAccessor.addPKConstraint(SERVICE_COMPONENT_DS_TABLE, "pk_sc_desiredstate", ID);
+
+    // create UNIQUE constraint, ensuring column order matches SQL files
+    String[] uniqueColumns = new String[] { "component_name", "service_name", "cluster_id" };
+    dbAccessor.addUniqueConstraint(SERVICE_COMPONENT_DS_TABLE, "unq_scdesiredstate_name",
+        uniqueColumns);
+
+    // add FKs back to SCDS in both HCDS and HCS tables
+    dbAccessor.addFKConstraint(HOST_COMPONENT_DS_TABLE, "hstcmpnntdesiredstatecmpnntnme",
+        uniqueColumns, SERVICE_COMPONENT_DS_TABLE, uniqueColumns, false);
+
+    dbAccessor.addFKConstraint(HOST_COMPONENT_STATE_TABLE, "hstcomponentstatecomponentname",
+        uniqueColumns, SERVICE_COMPONENT_DS_TABLE, uniqueColumns, false);
+
+    // Add sequence for SCDS id
+    addSequence("servicecomponentdesiredstate_id_seq", scdsIdCounter.get(), false);
+  }
+
+  /**
+   * Makes the following changes to the {@value #SERVICE_COMPONENT_HISTORY_TABLE} table:
+   * <ul>
+   * <li>id BIGINT NOT NULL</li>
+   * <li>component_id BIGINT NOT NULL</li>
+   * <li>upgrade_id BIGINT NOT NULL</li>
+   * <li>from_stack_id BIGINT NOT NULL</li>
+   * <li>to_stack_id BIGINT NOT NULL</li>
+   * <li>CONSTRAINT PK_sc_history PRIMARY KEY (id)</li>
+   * <li>CONSTRAINT FK_sc_history_component_id FOREIGN KEY (component_id) REFERENCES servicecomponentdesiredstate (id)</li>
+   * <li>CONSTRAINT FK_sc_history_upgrade_id FOREIGN KEY (upgrade_id) REFERENCES upgrade (upgrade_id)</li>
+   * <li>CONSTRAINT FK_sc_history_from_stack_id FOREIGN KEY (from_stack_id) REFERENCES stack (stack_id)</li>
+   * <li>CONSTRAINT FK_sc_history_to_stack_id FOREIGN KEY (to_stack_id) REFERENCES stack (stack_id)</li>
+   * <li>Creates the {@code servicecomponent_history_id_seq}</li>
+   * </ul>
+   *
+   * @throws SQLException
+   */
+  private void createServiceComponentHistoryTable() throws SQLException {
+    List<DBColumnInfo> columns = new ArrayList<>();
+    columns.add(new DBColumnInfo(ID, Long.class, null, null, false));
+    columns.add(new DBColumnInfo("component_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("upgrade_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("from_stack_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("to_stack_id", Long.class, null, null, false));
+    dbAccessor.createTable(SERVICE_COMPONENT_HISTORY_TABLE, columns, (String[]) null);
+
+    dbAccessor.addPKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "PK_sc_history", ID);
+
+    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_component_id",
+        "component_id", SERVICE_COMPONENT_DS_TABLE, "id", false);
+
+    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_upgrade_id",
+        "upgrade_id", UPGRADE_TABLE, "upgrade_id", false);
+
+    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_from_stack_id",
+        "from_stack_id", STACK_TABLE, "stack_id", false);
+
+    dbAccessor.addFKConstraint(SERVICE_COMPONENT_HISTORY_TABLE, "FK_sc_history_to_stack_id",
+        "to_stack_id", STACK_TABLE, "stack_id", false);
+
+    addSequence("servicecomponent_history_id_seq", 0L, false);
+  }
 }

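A small detail in updateServiceComponentDesiredStateTableDDL worth spelling out: because the id counter is post-incremented for every row it back-fills, its final value is already the next unused id, so it can seed the new sequence directly. A toy illustration with a hypothetical row count:

  // With three existing rows, ids 1, 2 and 3 are assigned and the counter
  // finishes at 4, which becomes the starting value of
  // servicecomponentdesiredstate_id_seq.
  AtomicLong scdsIdCounter = new AtomicLong(1);
  long firstId = scdsIdCounter.getAndIncrement();  // 1
  long secondId = scdsIdCounter.getAndIncrement(); // 2
  long thirdId = scdsIdCounter.getAndIncrement();  // 3
  long sequenceStart = scdsIdCounter.get();        // 4
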
http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 3ec982a..b892bc8 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -1020,6 +1020,7 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_re
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('topology_host_group_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('setting_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentstate_id_seq', 0);
+INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponentdesiredstate_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_history_id_seq', 0);
 
 insert into adminresourcetype (resource_type_id, resource_type_name)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4f78af7a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index a145253..95ae8d8 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -19,12 +19,33 @@
 package org.apache.ambari.server.upgrade;
 
 
-import com.google.inject.Binder;
-import com.google.inject.Guice;
-import com.google.inject.Injector;
-import com.google.inject.Module;
-import com.google.inject.Provider;
-import junit.framework.Assert;
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.eq;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.newCapture;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.persistence.EntityManager;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
@@ -32,40 +53,22 @@ import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
-import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import javax.persistence.EntityManager;
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
-import java.sql.Connection;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.Provider;
 
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMockBuilder;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.eq;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.newCapture;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.assertEquals;
+import junit.framework.Assert;
 
 public class UpgradeCatalog240Test {
   private static Injector injector;
@@ -114,7 +117,44 @@ public class UpgradeCatalog240Test {
     expect(connection.createStatement()).andReturn(statement);
     expect(statement.executeQuery(anyObject(String.class))).andReturn(resultSet);
 
-    replay(dbAccessor);
+    Capture<DBAccessor.DBColumnInfo> repoVersionRepoTypeColumnCapture = newCapture();
+    Capture<DBAccessor.DBColumnInfo> repoVersionUrlColumnCapture = newCapture();
+    Capture<DBAccessor.DBColumnInfo> repoVersionXmlColumnCapture = newCapture();
+    Capture<DBAccessor.DBColumnInfo> repoVersionXsdColumnCapture = newCapture();
+    Capture<DBAccessor.DBColumnInfo> repoVersionParentIdColumnCapture = newCapture();
+
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionRepoTypeColumnCapture));
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionUrlColumnCapture));
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionXmlColumnCapture));
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionXsdColumnCapture));
+    dbAccessor.addColumn(eq("repo_version"), capture(repoVersionParentIdColumnCapture));
+
+    // skip all of the drama of the servicecomponentdesiredstate table for now
+    expect(dbAccessor.tableHasPrimaryKey("servicecomponentdesiredstate", "id")).andReturn(true);
+
+    Capture<List<DBAccessor.DBColumnInfo>> capturedHistoryColumns = EasyMock.newCapture();
+    dbAccessor.createTable(eq("servicecomponent_history"), capture(capturedHistoryColumns),
+        eq((String[]) null));
+
+    dbAccessor.addPKConstraint("servicecomponent_history", "PK_sc_history", "id");
+    dbAccessor.addFKConstraint("servicecomponent_history", "FK_sc_history_component_id",
+        "component_id", "servicecomponentdesiredstate", "id", false);
+
+    dbAccessor.addFKConstraint("servicecomponent_history", "FK_sc_history_upgrade_id", "upgrade_id",
+        "upgrade", "upgrade_id", false);
+
+    dbAccessor.addFKConstraint("servicecomponent_history", "FK_sc_history_from_stack_id",
+        "from_stack_id", "stack", "stack_id", false);
+
+    dbAccessor.addFKConstraint("servicecomponent_history", "FK_sc_history_to_stack_id",
+        "to_stack_id", "stack", "stack_id", false);
+
+    expect(dbAccessor.getConnection()).andReturn(connection);
+    expect(connection.createStatement()).andReturn(statement);
+    expect(statement.executeQuery(anyObject(String.class))).andReturn(resultSet);
+
+    replay(dbAccessor, configuration, connection, statement, resultSet);
+
     Module module = new Module() {
       @Override
       public void configure(Binder binder) {
@@ -148,6 +188,21 @@ public class UpgradeCatalog240Test {
     for(DBAccessor.DBColumnInfo settingColumnInfo : capturedSettingColumns.getValue()) {
       actualCaptures.put(settingColumnInfo.getName(), settingColumnInfo.getType());
     }
+
+    assertEquals(expectedCaptures, actualCaptures);
+
+    expectedCaptures = new HashMap<>();
+    expectedCaptures.put("id", Long.class);
+    expectedCaptures.put("component_id", Long.class);
+    expectedCaptures.put("upgrade_id", Long.class);
+    expectedCaptures.put("from_stack_id", Long.class);
+    expectedCaptures.put("to_stack_id", Long.class);
+
+    actualCaptures = new HashMap<>();
+    for (DBAccessor.DBColumnInfo historyColumnInfo : capturedHistoryColumns.getValue()) {
+      actualCaptures.put(historyColumnInfo.getName(), historyColumnInfo.getType());
+    }
+
     assertEquals(expectedCaptures, actualCaptures);
 
     verify(dbAccessor);