Posted to commits@hive.apache.org by sa...@apache.org on 2017/12/21 08:42:09 UTC

hive git commit: HIVE-18031: Support replication for Alter Database operation (Sankar Hariappan, reviewed by Anishek Agarwal)

Repository: hive
Updated Branches:
  refs/heads/master 21e18dea4 -> ad5bcb150


HIVE-18031: Support replication for Alter Database operation (Sankar Hariappan, reviewed by Anishek Agarwal)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ad5bcb15
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ad5bcb15
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ad5bcb15

Branch: refs/heads/master
Commit: ad5bcb150619764a0c9fccc42f056321ed18cca6
Parents: 21e18de
Author: Sankar Hariappan <sa...@apache.org>
Authored: Thu Dec 21 14:08:14 2017 +0530
Committer: Sankar Hariappan <sa...@apache.org>
Committed: Thu Dec 21 14:09:17 2017 +0530

----------------------------------------------------------------------
 .../listener/DbNotificationListener.java        |  16 +++
 .../hive/ql/parse/TestReplicationScenarios.java | 101 ++++++++++++++++---
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |  13 +--
 .../exec/repl/bootstrap/load/LoadDatabase.java  |  10 ++
 .../hive/ql/parse/DDLSemanticAnalyzer.java      |   2 +-
 .../hadoop/hive/ql/parse/repl/DumpType.java     |   7 ++
 .../repl/dump/events/AlterDatabaseHandler.java  |  42 ++++++++
 .../repl/dump/events/EventHandlerFactory.java   |   3 +-
 .../repl/load/message/AlterDatabaseHandler.java |  97 ++++++++++++++++++
 .../hadoop/hive/ql/plan/AlterDatabaseDesc.java  |  10 +-
 .../hadoop/hive/metastore/HiveMetaStore.java    |  31 +++++-
 .../hive/metastore/MetaStoreEventListener.java  |  10 +-
 .../metastore/MetaStoreListenerNotifier.java    |  13 ++-
 .../metastore/events/AlterDatabaseEvent.java    |  56 ++++++++++
 .../messaging/AlterDatabaseMessage.java         |  36 +++++++
 .../hive/metastore/messaging/EventMessage.java  |   1 +
 .../messaging/MessageDeserializer.java          |   7 ++
 .../metastore/messaging/MessageFactory.java     |   9 ++
 .../json/JSONAlterDatabaseMessage.java          |  96 ++++++++++++++++++
 .../messaging/json/JSONMessageDeserializer.java |  19 +++-
 .../messaging/json/JSONMessageFactory.java      |  22 +++-
 21 files changed, 561 insertions(+), 40 deletions(-)
----------------------------------------------------------------------
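
For context, the flow this patch enables: an ALTER DATABASE on the source warehouse now raises an ALTER_DATABASE metastore event, DbNotificationListener records it in the notification log, REPL DUMP serializes it via AlterDatabaseMessage, and REPL LOAD replays it on the target through the new load-side AlterDatabaseHandler. A minimal sketch of triggering such an event through the existing metastore client API (the class name, database name, and property key are illustrative, and DbNotificationListener is assumed to be configured):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.Database;

    public class AlterDbEventTrigger {
      public static void main(String[] args) throws Exception {
        HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        Database db = client.getDatabase("repl_src");   // illustrative database name
        db.putToParameters("blah", "foo");              // same property the new test replicates
        db.setOwnerName("test");                        // owner change, as in testDatabaseAlters
        client.alterDatabase("repl_src", db);           // now fires EventType.ALTER_DATABASE
        client.close();
      }
    }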


http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
----------------------------------------------------------------------
diff --git a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
index 67fc34d..4e9949c 100644
--- a/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
+++ b/hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java
@@ -53,6 +53,7 @@ import org.apache.hadoop.hive.metastore.events.AddNotNullConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AddPrimaryKeyEvent;
 import org.apache.hadoop.hive.metastore.events.AddUniqueConstraintEvent;
+import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.AlterIndexEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
@@ -347,6 +348,21 @@ public class DbNotificationListener extends TransactionalMetaStoreEventListener
   }
 
   /**
+   * @param dbEvent alter database event
+   * @throws MetaException
+   */
+  @Override
+  public void onAlterDatabase(AlterDatabaseEvent dbEvent) throws MetaException {
+    Database oldDb = dbEvent.getOldDatabase();
+    Database newDb = dbEvent.getNewDatabase();
+    NotificationEvent event =
+            new NotificationEvent(0, now(), EventType.ALTER_DATABASE.toString(), msgFactory
+                    .buildAlterDatabaseMessage(oldDb, newDb).toString());
+    event.setDbName(oldDb.getName());
+    process(event, dbEvent);
+  }
+
+  /**
    * @param fnEvent function event
    * @throws MetaException
    */
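
Once the listener above has recorded the event, the new ALTER_DATABASE entries can be read back from the notification log with the existing client API. A minimal sketch (the class name, starting event id, and batch size are illustrative):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.NotificationEvent;

    public class ReadAlterDbEvents {
      public static void main(String[] args) throws Exception {
        HiveMetaStoreClient client = new HiveMetaStoreClient(new HiveConf());
        for (NotificationEvent ev : client.getNextNotification(0L, 100, null).getEvents()) {
          if ("ALTER_DATABASE".equals(ev.getEventType())) {
            // The message payload is the JSONAlterDatabaseMessage built above.
            System.out.println(ev.getDbName() + " : " + ev.getMessage());
          }
        }
        client.close();
      }
    }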

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 55acd1d..9062d43 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore;
 import org.apache.hadoop.hive.metastore.InjectableBehaviourObjectStore.BehaviourInjection;
 import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
 import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.NotNullConstraintsRequest;
@@ -1083,9 +1084,9 @@ public class TestReplicationScenarios {
   }
 
   @Test
-  public void testAlters() throws IOException {
+  public void testTableAlters() throws IOException {
 
-    String testName = "alters";
+    String testName = "TableAlters";
     String dbName = createDB(testName, driver);
     run("CREATE TABLE " + dbName + ".unptned(a string) STORED AS TEXTFILE", driver);
     run("CREATE TABLE " + dbName + ".unptned2(a string) STORED AS TEXTFILE", driver);
@@ -1246,6 +1247,63 @@ public class TestReplicationScenarios {
   }
 
   @Test
+  public void testDatabaseAlters() throws IOException {
+
+    String testName = "DatabaseAlters";
+    String dbName = createDB(testName, driver);
+    String replDbName = dbName + "_dupe";
+    String ownerName = "test";
+
+    run("ALTER DATABASE " + dbName + " SET OWNER USER " + ownerName, driver);
+
+    // Trigger bootstrap replication
+    Tuple bootstrap = bootstrapLoadAndVerify(dbName, replDbName);
+
+    try {
+      Database replDb = metaStoreClientMirror.getDatabase(replDbName);
+      assertEquals(ownerName, replDb.getOwnerName());
+      assertEquals("USER", replDb.getOwnerType().toString());
+    } catch (TException e) {
+      assertNull(e);
+    }
+
+    // Alter database set DB property
+    String testKey = "blah";
+    String testVal = "foo";
+    run("ALTER DATABASE " + dbName + " SET DBPROPERTIES ('" + testKey + "' = '" + testVal + "')", driver);
+
+    // All alters done, now we replicate them over.
+    Tuple incremental = incrementalLoadAndVerify(dbName, bootstrap.lastReplId, replDbName);
+
+    // Replication done, we need to check if the new property is added
+    try {
+      Database replDb = metaStoreClientMirror.getDatabase(replDbName);
+      assertTrue(replDb.getParameters().containsKey(testKey));
+      assertEquals(testVal, replDb.getParameters().get(testKey));
+    } catch (TException e) {
+      assertNull(e);
+    }
+
+    String newValue = "newFoo";
+    String newOwnerName = "newTest";
+    run("ALTER DATABASE " + dbName + " SET DBPROPERTIES ('" + testKey + "' = '" + newValue + "')", driver);
+    run("ALTER DATABASE " + dbName + " SET OWNER ROLE " + newOwnerName, driver);
+
+    incremental = incrementalLoadAndVerify(dbName, incremental.lastReplId, replDbName);
+
+    // Replication done, we need to check if new value is set for existing property
+    try {
+      Database replDb = metaStoreClientMirror.getDatabase(replDbName);
+      assertTrue(replDb.getParameters().containsKey(testKey));
+      assertEquals(newValue, replDb.getParameters().get(testKey));
+      assertEquals(newOwnerName, replDb.getOwnerName());
+      assertEquals("ROLE", replDb.getOwnerType().toString());
+    } catch (TException e) {
+      assertNull(e);
+    }
+  }
+
+  @Test
   public void testIncrementalLoad() throws IOException {
     String testName = "incrementalLoad";
     String dbName = createDB(testName, driver);
@@ -2244,18 +2302,24 @@ public class TestReplicationScenarios {
     String[] unptn_data_load1 = new String[] { "eleven" };
     String[] unptn_data_load2 = new String[] { "eleven", "thirteen" };
 
-    // 3 events to insert, last repl ID: replDumpId+3
+    // x events to insert, last repl ID: replDumpId+x
     run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data[0] + "')", driver);
-    // 3 events to insert, last repl ID: replDumpId+6
+    String firstInsertLastReplId = replDumpDb(dbName, replDumpId, null, null).lastReplId;
+    Integer numOfEventsIns1 = Integer.valueOf(firstInsertLastReplId) - Integer.valueOf(replDumpId);
+
+    // x events to insert, last repl ID: replDumpId+2x
     run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data[1] + "')", driver);
-    // 3 events to insert, last repl ID: replDumpId+9
+    String secondInsertLastReplId = replDumpDb(dbName, firstInsertLastReplId, null, null).lastReplId;
+    Integer numOfEventsIns2 = Integer.valueOf(secondInsertLastReplId) - Integer.valueOf(firstInsertLastReplId);
+
+    // x events to insert, last repl ID: replDumpId+3x
     run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data[2] + "')", driver);
     verifyRun("SELECT a from " + dbName + ".unptned ORDER BY a", unptn_data, driver);
 
     run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'", driverMirror);
 
     advanceDumpDir();
-    run("REPL DUMP " + dbName + " FROM " + replDumpId + " LIMIT 3", driver);
+    run("REPL DUMP " + dbName + " FROM " + replDumpId + " LIMIT " + numOfEventsIns1, driver);
     String incrementalDumpLocn = getResult(0, 0, driver);
     String incrementalDumpId = getResult(0, 1, true, driver);
     LOG.info("Incremental-Dump: Dumped to {} with id {} from {}", incrementalDumpLocn, incrementalDumpId, replDumpId);
@@ -2270,7 +2334,7 @@ public class TestReplicationScenarios {
     lastReplID += 1000;
     String toReplID = String.valueOf(lastReplID);
 
-    run("REPL DUMP " + dbName + " FROM " + replDumpId + " TO " + toReplID + " LIMIT 3", driver);
+    run("REPL DUMP " + dbName + " FROM " + replDumpId + " TO " + toReplID + " LIMIT " + numOfEventsIns2, driver);
     incrementalDumpLocn = getResult(0, 0, driver);
     incrementalDumpId = getResult(0, 1, true, driver);
     LOG.info("Incremental-Dump: Dumped to {} with id {} from {}", incrementalDumpLocn, incrementalDumpId, replDumpId);
@@ -2527,15 +2591,24 @@ public class TestReplicationScenarios {
     String[] unptn_data_load1 = new String[] { "eleven" };
     String[] unptn_data_load2 = new String[] { "eleven", "thirteen" };
 
-    // 3 events to insert, last repl ID: replDumpId+3
+    // x events to insert, last repl ID: replDumpId+x
     run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data[0] + "')", driver);
-    // 3 events to insert, last repl ID: replDumpId+6
+    String firstInsertLastReplId = replDumpDb(dbName, replDumpId, null, null).lastReplId;
+    Integer numOfEventsIns1 = Integer.valueOf(firstInsertLastReplId) - Integer.valueOf(replDumpId);
+
+    // x events to insert, last repl ID: replDumpId+2x
     run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data[1] + "')", driver);
     verifyRun("SELECT a from " + dbName + ".unptned ORDER BY a", unptn_data, driver);
-    // 1 event to truncate, last repl ID: replDumpId+8
+    String secondInsertLastReplId = replDumpDb(dbName, firstInsertLastReplId, null, null).lastReplId;
+    Integer numOfEventsIns2 = Integer.valueOf(secondInsertLastReplId) - Integer.valueOf(firstInsertLastReplId);
+
+    // y events to truncate, last repl ID: replDumpId+2x+y
     run("TRUNCATE TABLE " + dbName + ".unptned", driver);
     verifyRun("SELECT a from " + dbName + ".unptned ORDER BY a", empty, driver);
-    // 3 events to insert, last repl ID: replDumpId+11
+    String thirdTruncLastReplId = replDumpDb(dbName, secondInsertLastReplId, null, null).lastReplId;
+    Integer numOfEventsTrunc3 = Integer.valueOf(thirdTruncLastReplId) - Integer.valueOf(secondInsertLastReplId);
+
+    // x events to insert, last repl ID: replDumpId+3x+y
     run("INSERT INTO TABLE " + dbName + ".unptned values('" + unptn_data_load1[0] + "')", driver);
     verifyRun("SELECT a from " + dbName + ".unptned ORDER BY a", unptn_data_load1, driver);
 
@@ -2543,7 +2616,7 @@ public class TestReplicationScenarios {
 
     // Dump and load only first insert (1 record)
     advanceDumpDir();
-    run("REPL DUMP " + dbName + " FROM " + replDumpId + " LIMIT 3", driver);
+    run("REPL DUMP " + dbName + " FROM " + replDumpId + " LIMIT " + numOfEventsIns1, driver);
     String incrementalDumpLocn = getResult(0, 0, driver);
     String incrementalDumpId = getResult(0, 1, true, driver);
     LOG.info("Incremental-Dump: Dumped to {} with id {} from {}", incrementalDumpLocn, incrementalDumpId, replDumpId);
@@ -2559,7 +2632,7 @@ public class TestReplicationScenarios {
     lastReplID += 1000;
     String toReplID = String.valueOf(lastReplID);
 
-    run("REPL DUMP " + dbName + " FROM " + replDumpId + " TO " + toReplID + " LIMIT 3", driver);
+    run("REPL DUMP " + dbName + " FROM " + replDumpId + " TO " + toReplID + " LIMIT " + numOfEventsIns2, driver);
     incrementalDumpLocn = getResult(0, 0, driver);
     incrementalDumpId = getResult(0, 1, true, driver);
     LOG.info("Incremental-Dump: Dumped to {} with id {} from {}", incrementalDumpLocn, incrementalDumpId, replDumpId);
@@ -2570,7 +2643,7 @@ public class TestReplicationScenarios {
 
     // Dump and load only truncate (0 records)
     advanceDumpDir();
-    run("REPL DUMP " + dbName + " FROM " + replDumpId + " LIMIT 2", driver);
+    run("REPL DUMP " + dbName + " FROM " + replDumpId + " LIMIT " + numOfEventsTrunc3, driver);
     incrementalDumpLocn = getResult(0, 0, driver);
     incrementalDumpId = getResult(0, 1, true, driver);
     LOG.info("Incremental-Dump: Dumped to {} with id {} from {}", incrementalDumpLocn, incrementalDumpId, replDumpId);

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index f10c31e..fec3154 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -1183,15 +1183,16 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
     }
 
+    Map<String, String> params = database.getParameters();
+    if ((null != alterDbDesc.getReplicationSpec())
+        && !alterDbDesc.getReplicationSpec().allowEventReplacementInto(params)) {
+      LOG.debug("DDLTask: Alter Database {} is skipped as database is newer than update", dbName);
+      return 0; // no replacement, the existing database state is newer than our update.
+    }
+
     switch (alterDbDesc.getAlterType()) {
     case ALTER_PROPERTY:
       Map<String, String> newParams = alterDbDesc.getDatabaseProperties();
-      Map<String, String> params = database.getParameters();
-
-      if (!alterDbDesc.getReplicationSpec().allowEventReplacementInto(params)) {
-        LOG.debug("DDLTask: Alter Database {} is skipped as database is newer than update", dbName);
-        return 0; // no replacement, the existing database state is newer than our update.
-      }
 
       // if both old and new params are not null, merge them
       if (params != null && newParams != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
index bab64ad..65e3e3c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/repl/bootstrap/load/LoadDatabase.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
 
 import java.io.Serializable;
 import java.util.HashMap;
@@ -59,6 +60,7 @@ public class LoadDatabase {
       Task<? extends Serializable> dbRootTask = existEmptyDb(dbInMetadata.getName())
           ? alterDbTask(dbInMetadata, context.hiveConf)
           : createDbTask(dbInMetadata);
+      dbRootTask.addDependentTask(setOwnerInfoTask(dbInMetadata));
       tracker.addTask(dbRootTask);
       return tracker;
     } catch (Exception e) {
@@ -99,6 +101,14 @@ public class LoadDatabase {
     return TaskFactory.get(work, hiveConf);
   }
 
+  private Task<? extends Serializable> setOwnerInfoTask(Database dbObj) {
+    AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(),
+            new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType()),
+            null);
+    DDLWork work = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
+    return TaskFactory.get(work, context.hiveConf);
+  }
+
   private boolean existEmptyDb(String dbName) throws InvalidOperationException, HiveException {
     Database db = context.hiveDb.getDatabase(dbName);
     if (db == null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index c8c1665..bce9aa1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -818,7 +818,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       throw new SemanticException("Owner type " + nullCmdMsg);
     }
 
-    AlterDatabaseDesc alterDesc = new AlterDatabaseDesc(dbName, principalDesc);
+    AlterDatabaseDesc alterDesc = new AlterDatabaseDesc(dbName, principalDesc, null);
     addAlterDbDesc(alterDesc);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/DumpType.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/DumpType.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/DumpType.java
index e982603..a852363 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/DumpType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/DumpType.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.hive.ql.parse.repl.load.message.AddNotNullConstraintHan
 import org.apache.hadoop.hive.ql.parse.repl.load.message.AddForeignKeyHandler;
 import org.apache.hadoop.hive.ql.parse.repl.load.message.AddPrimaryKeyHandler;
 import org.apache.hadoop.hive.ql.parse.repl.load.message.AddUniqueConstraintHandler;
+import org.apache.hadoop.hive.ql.parse.repl.load.message.AlterDatabaseHandler;
 import org.apache.hadoop.hive.ql.parse.repl.load.message.CreateFunctionHandler;
 import org.apache.hadoop.hive.ql.parse.repl.load.message.DefaultHandler;
 import org.apache.hadoop.hive.ql.parse.repl.load.message.DropConstraintHandler;
@@ -67,6 +68,12 @@ public enum DumpType {
       return new DropPartitionHandler();
     }
   },
+  EVENT_ALTER_DATABASE("EVENT_ALTER_DATABASE") {
+    @Override
+    public MessageHandler handler() {
+      return new AlterDatabaseHandler();
+    }
+  },
   EVENT_ALTER_TABLE("EVENT_ALTER_TABLE") {
     @Override
     public MessageHandler handler() {

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandler.java
new file mode 100644
index 0000000..3863c59
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AlterDatabaseHandler.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.dump.events;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.ql.parse.repl.DumpType;
+import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
+
+class AlterDatabaseHandler extends AbstractEventHandler {
+
+  AlterDatabaseHandler(NotificationEvent event) {
+    super(event);
+  }
+
+  @Override
+  public void handle(Context withinContext) throws Exception {
+    LOG.info("Processing#{} ALTER_DATABASE message : {}", fromEventId(), event.getMessage());
+    DumpMetaData dmd = withinContext.createDmd(this);
+    dmd.setPayload(event.getMessage());
+    dmd.write();
+  }
+
+  @Override
+  public DumpType dumpType() {
+    return DumpType.EVENT_ALTER_DATABASE;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/EventHandlerFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/EventHandlerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/EventHandlerFactory.java
index 4a68235..922ebb4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/EventHandlerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/EventHandlerFactory.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -34,6 +34,7 @@ public class EventHandlerFactory {
 
   static {
     register(MessageFactory.ADD_PARTITION_EVENT, AddPartitionHandler.class);
+    register(MessageFactory.ALTER_DATABASE_EVENT, AlterDatabaseHandler.class);
     register(MessageFactory.ALTER_PARTITION_EVENT, AlterPartitionHandler.class);
     register(MessageFactory.ALTER_TABLE_EVENT, AlterTableHandler.class);
     register(MessageFactory.CREATE_FUNCTION_EVENT, CreateFunctionHandler.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
new file mode 100644
index 0000000..6886ce0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/AlterDatabaseHandler.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.repl.load.message;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.parse.repl.dump.Utils;
+import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
+
+import java.io.Serializable;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * AlterDatabaseHandler.
+ * Handler at target warehouse for the EVENT_ALTER_DATABASE type of messages
+ */
+public class AlterDatabaseHandler extends AbstractMessageHandler {
+  @Override
+  public List<Task<? extends Serializable>> handle(Context context)
+      throws SemanticException {
+
+    if (!context.isTableNameEmpty()) {
+      throw new SemanticException(
+              "Alter Database is not supported for table-level replication");
+    }
+
+    AlterDatabaseMessage msg = deserializer.getAlterDatabaseMessage(context.dmd.getPayload());
+    String actualDbName = context.isDbNameEmpty() ? msg.getDB() : context.dbName;
+
+    try {
+      Database oldDb = msg.getDbObjBefore();
+      Database newDb = msg.getDbObjAfter();
+      AlterDatabaseDesc alterDbDesc;
+
+      if ((oldDb.getOwnerType() == newDb.getOwnerType())
+            && oldDb.getOwnerName().equalsIgnoreCase(newDb.getOwnerName())) {
+        // If owner information is unchanged, then DB properties would've changed
+        Map<String, String> newDbProps = new HashMap<>();
+        Map<String, String> dbProps = newDb.getParameters();
+
+        for (Map.Entry<String, String> entry : dbProps.entrySet()) {
+          String key = entry.getKey();
+          // Ignore the keys which are local to source warehouse
+          if (key.startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX)
+                  || key.equals(ReplicationSpec.KEY.CURR_STATE_ID.toString())) {
+            continue;
+          }
+          newDbProps.put(key, entry.getValue());
+        }
+        alterDbDesc = new AlterDatabaseDesc(actualDbName,
+                newDbProps, context.eventOnlyReplicationSpec());
+      } else {
+        alterDbDesc = new AlterDatabaseDesc(actualDbName,
+                new PrincipalDesc(newDb.getOwnerName(), newDb.getOwnerType()),
+                context.eventOnlyReplicationSpec());
+      }
+
+      Task<DDLWork> alterDbTask = TaskFactory.get(
+          new DDLWork(readEntitySet, writeEntitySet, alterDbDesc), context.hiveConf);
+      context.log.debug("Added alter database task : {}:{}",
+              alterDbTask.getId(), actualDbName);
+
+      // Only database object is updated
+      updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName,
+              null, null);
+      return Collections.singletonList(alterDbTask);
+    } catch (Exception e) {
+      throw (e instanceof SemanticException)
+          ? (SemanticException) e
+          : new SemanticException("Error reading message members", e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java
index ca6f090..8def544 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java
@@ -51,7 +51,8 @@ public class AlterDatabaseDesc extends DDLDesc implements Serializable {
   public AlterDatabaseDesc() {
   }
 
-  public AlterDatabaseDesc(String databaseName, Map<String, String> dbProps, ReplicationSpec replicationSpec) {
+  public AlterDatabaseDesc(String databaseName, Map<String, String> dbProps,
+                           ReplicationSpec replicationSpec) {
     super();
     this.databaseName = databaseName;
     this.replicationSpec = replicationSpec;
@@ -59,8 +60,10 @@ public class AlterDatabaseDesc extends DDLDesc implements Serializable {
     this.setAlterType(ALTER_DB_TYPES.ALTER_PROPERTY);
   }
 
-  public AlterDatabaseDesc(String databaseName, PrincipalDesc ownerPrincipal) {
+  public AlterDatabaseDesc(String databaseName, PrincipalDesc ownerPrincipal,
+                           ReplicationSpec replicationSpec) {
     this.databaseName = databaseName;
+    this.replicationSpec = replicationSpec;
     this.setOwnerPrincipal(ownerPrincipal);
     this.setAlterType(ALTER_DB_TYPES.ALTER_OWNER);
   }
@@ -119,9 +122,6 @@ public class AlterDatabaseDesc extends DDLDesc implements Serializable {
    * This can result in a "ALTER IF NEWER THAN" kind of semantic
    */
   public ReplicationSpec getReplicationSpec() {
-    if (replicationSpec == null) {
-      this.replicationSpec = new ReplicationSpec();
-    }
     return this.replicationSpec;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 7c0b7f1..f1b58c5 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -79,6 +79,7 @@ import org.apache.hadoop.hive.metastore.events.AddNotNullConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AddPrimaryKeyEvent;
 import org.apache.hadoop.hive.metastore.events.AddUniqueConstraintEvent;
+import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.AlterIndexEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
@@ -1038,6 +1039,9 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       startFunction("alter_database" + dbName);
       boolean success = false;
       Exception ex = null;
+      RawStore ms = getMS();
+      Database oldDB = null;
+      Map<String, String> transactionalListenersResponses = Collections.emptyMap();
 
       // Perform the same URI normalization as create_database_core.
       if (newDB.getLocationUri() != null) {
@@ -1045,17 +1049,38 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       }
 
       try {
-        Database oldDB = get_database_core(dbName);
+        oldDB = get_database_core(dbName);
         if (oldDB == null) {
           throw new MetaException("Could not alter database \"" + dbName + "\". Could not retrieve old definition.");
         }
         firePreEvent(new PreAlterDatabaseEvent(oldDB, newDB, this));
-        getMS().alterDatabase(dbName, newDB);
-        success = true;
+
+        ms.openTransaction();
+        ms.alterDatabase(dbName, newDB);
+
+        if (!transactionalListeners.isEmpty()) {
+          transactionalListenersResponses =
+                  MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
+                          EventType.ALTER_DATABASE,
+                          new AlterDatabaseEvent(oldDB, newDB, true, this));
+        }
+
+        success = ms.commitTransaction();
       } catch (Exception e) {
         ex = e;
         rethrowException(e);
       } finally {
+        if (!success) {
+          ms.rollbackTransaction();
+        }
+
+        if ((null != oldDB) && (!listeners.isEmpty())) {
+          MetaStoreListenerNotifier.notifyEvent(listeners,
+                  EventType.ALTER_DATABASE,
+                  new AlterDatabaseEvent(oldDB, newDB, success, this),
+                  null,
+                  transactionalListenersResponses, ms);
+        }
         endFunction("alter_database", success, ex);
       }
     }
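
With this change, alter_database notifies transactional listeners inside the metastore transaction and regular listeners after it completes, carrying the success status. A minimal configuration sketch, assuming the usual replication setup, that registers DbNotificationListener so these ALTER_DATABASE events are recorded transactionally (the helper class name is illustrative; the property key is the existing metastore listener setting):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class ReplListenerConf {
      public static HiveConf withNotificationListener() {
        HiveConf conf = new HiveConf();
        // Transactional listeners run inside the same transaction as ms.alterDatabase().
        conf.set("hive.metastore.transactional.event.listeners",
            "org.apache.hive.hcatalog.listener.DbNotificationListener");
        return conf;
      }
    }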

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
index fc4f4d7..3a351da 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.events.AddIndexEvent;
 import org.apache.hadoop.hive.metastore.events.AddNotNullConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AddPrimaryKeyEvent;
 import org.apache.hadoop.hive.metastore.events.AddUniqueConstraintEvent;
+import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.AlterIndexEvent;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
@@ -125,6 +126,13 @@ public abstract class MetaStoreEventListener implements Configurable {
   }
 
   /**
+   * @param dbEvent alter database event
+   * @throws MetaException
+   */
+  public void onAlterDatabase(AlterDatabaseEvent dbEvent) throws MetaException {
+  }
+
+  /**
    * @param partSetDoneEvent
    * @throws MetaException
    */
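
The new onAlterDatabase() hook is a no-op by default, so existing listeners are unaffected; a custom listener can override it to observe the change. A minimal sketch (the class name and the audit action are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent;

    public class OwnerChangeAuditListener extends MetaStoreEventListener {
      public OwnerChangeAuditListener(Configuration config) {
        super(config);
      }

      @Override
      public void onAlterDatabase(AlterDatabaseEvent dbEvent) throws MetaException {
        // Both the before and after Database snapshots come straight from the event.
        String oldOwner = dbEvent.getOldDatabase().getOwnerName();
        String newOwner = dbEvent.getNewDatabase().getOwnerName();
        if (newOwner != null && !newOwner.equals(oldOwner)) {
          System.out.println("Owner of " + dbEvent.getNewDatabase().getName()
              + " changed from " + oldOwner + " to " + newOwner);
        }
      }
    }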

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
index f6e25c6..3899e3c 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.metastore.events.AddNotNullConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AddPrimaryKeyEvent;
 import org.apache.hadoop.hive.metastore.events.AddUniqueConstraintEvent;
+import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.AlterIndexEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
@@ -64,7 +65,8 @@ public class MetaStoreListenerNotifier {
       ImmutableMap.<EventType, EventNotifier>builder()
           .put(EventType.CREATE_DATABASE, new EventNotifier() {
             @Override
-            public void notify(MetaStoreEventListener listener, ListenerEvent event) throws MetaException {
+            public void notify(MetaStoreEventListener listener,
+                               ListenerEvent event) throws MetaException {
               listener.onCreateDatabase((CreateDatabaseEvent)event);
             }
           })
@@ -98,6 +100,13 @@ public class MetaStoreListenerNotifier {
               listener.onDropPartition((DropPartitionEvent)event);
             }
           })
+          .put(EventType.ALTER_DATABASE, new EventNotifier() {
+            @Override
+            public void notify(MetaStoreEventListener listener,
+                               ListenerEvent event) throws MetaException {
+              listener.onAlterDatabase((AlterDatabaseEvent)event);
+            }
+          })
           .put(EventType.ALTER_TABLE, new EventNotifier() {
             @Override
             public void notify(MetaStoreEventListener listener, ListenerEvent event) throws MetaException {

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/AlterDatabaseEvent.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/AlterDatabaseEvent.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/AlterDatabaseEvent.java
new file mode 100644
index 0000000..8075e11
--- /dev/null
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/events/AlterDatabaseEvent.java
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.events;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hive.metastore.IHMSHandler;
+import org.apache.hadoop.hive.metastore.api.Database;
+
+/**
+ * AlterDatabaseEvent.
+ * Event captured when a database is altered (owner info, properties, or location).
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public class AlterDatabaseEvent extends ListenerEvent {
+
+  private final Database oldDb;
+  private final Database newDb;
+
+  public AlterDatabaseEvent(Database oldDb, Database newDb, boolean status, IHMSHandler handler) {
+    super(status, handler);
+    this.oldDb = oldDb;
+    this.newDb = newDb;
+  }
+
+  /**
+   * @return the old db
+   */
+  public Database getOldDatabase() {
+    return oldDb;
+  }
+
+  /**
+   * @return the new db
+   */
+  public Database getNewDatabase() {
+    return newDb;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/AlterDatabaseMessage.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/AlterDatabaseMessage.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/AlterDatabaseMessage.java
new file mode 100644
index 0000000..2d9f53f
--- /dev/null
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/AlterDatabaseMessage.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.metastore.messaging;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+
+/**
+ * AlterDatabaseMessage.
+ * Abstract class to store the Alter database message
+ */
+public abstract class AlterDatabaseMessage extends EventMessage {
+
+  protected AlterDatabaseMessage() {
+    super(EventType.ALTER_DATABASE);
+  }
+
+  public abstract Database getDbObjBefore() throws Exception;
+  public abstract Database getDbObjAfter() throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventMessage.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventMessage.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventMessage.java
index 7b22fac..dad2f5b 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventMessage.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/EventMessage.java
@@ -36,6 +36,7 @@ public abstract class EventMessage {
     DROP_TABLE(MessageFactory.DROP_TABLE_EVENT),
     ADD_PARTITION(MessageFactory.ADD_PARTITION_EVENT),
     DROP_PARTITION(MessageFactory.DROP_PARTITION_EVENT),
+    ALTER_DATABASE(MessageFactory.ALTER_DATABASE_EVENT),
     ALTER_TABLE(MessageFactory.ALTER_TABLE_EVENT),
     ALTER_PARTITION(MessageFactory.ALTER_PARTITION_EVENT),
     INSERT(MessageFactory.INSERT_EVENT),

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageDeserializer.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageDeserializer.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageDeserializer.java
index 810dc64..f85dc40 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageDeserializer.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageDeserializer.java
@@ -32,6 +32,8 @@ public abstract class MessageDeserializer {
     switch (EventMessage.EventType.valueOf(eventTypeString)) {
     case CREATE_DATABASE:
       return getCreateDatabaseMessage(messageBody);
+    case ALTER_DATABASE:
+      return getAlterDatabaseMessage(messageBody);
     case DROP_DATABASE:
       return getDropDatabaseMessage(messageBody);
     case CREATE_TABLE:
@@ -79,6 +81,11 @@ public abstract class MessageDeserializer {
   public abstract CreateDatabaseMessage getCreateDatabaseMessage(String messageBody);
 
   /**
+   * Method to de-serialize AlterDatabaseMessage instance.
+   */
+  public abstract AlterDatabaseMessage getAlterDatabaseMessage(String messageBody);
+
+  /**
    * Method to de-serialize DropDatabaseMessage instance.
    */
   public abstract DropDatabaseMessage getDropDatabaseMessage(String messageBody);

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageFactory.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageFactory.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageFactory.java
index 46fd336..0e3357d 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageFactory.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/MessageFactory.java
@@ -50,6 +50,7 @@ public abstract class MessageFactory {
   public static final String ALTER_TABLE_EVENT = "ALTER_TABLE";
   public static final String DROP_TABLE_EVENT = "DROP_TABLE";
   public static final String CREATE_DATABASE_EVENT = "CREATE_DATABASE";
+  public static final String ALTER_DATABASE_EVENT = "ALTER_DATABASE";
   public static final String DROP_DATABASE_EVENT = "DROP_DATABASE";
   public static final String INSERT_EVENT = "INSERT";
   public static final String CREATE_FUNCTION_EVENT = "CREATE_FUNCTION";
@@ -134,6 +135,14 @@ public abstract class MessageFactory {
   public abstract CreateDatabaseMessage buildCreateDatabaseMessage(Database db);
 
   /**
+   * Factory method for AlterDatabaseMessage.
+   * @param beforeDb The Database before alter.
+   * @param afterDb The Database after alter.
+   * @return AlterDatabaseMessage instance.
+   */
+  public abstract AlterDatabaseMessage buildAlterDatabaseMessage(Database beforeDb, Database afterDb);
+
+  /**
    * Factory method for DropDatabaseMessage.
    * @param db The Database being dropped.
    * @return DropDatabaseMessage instance.

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONAlterDatabaseMessage.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONAlterDatabaseMessage.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONAlterDatabaseMessage.java
new file mode 100644
index 0000000..28a7d45
--- /dev/null
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONAlterDatabaseMessage.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.messaging.json;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage;
+import org.apache.thrift.TException;
+import org.codehaus.jackson.annotate.JsonProperty;
+
+/**
+ * JSON alter database message.
+ */
+public class JSONAlterDatabaseMessage extends AlterDatabaseMessage {
+
+  @JsonProperty
+  String server, servicePrincipal, db, dbObjBeforeJson, dbObjAfterJson;
+
+  @JsonProperty
+  Long timestamp;
+
+  /**
+   * Default constructor, needed for Jackson.
+   */
+  public JSONAlterDatabaseMessage() {
+  }
+
+  public JSONAlterDatabaseMessage(String server, String servicePrincipal,
+                                  Database dbObjBefore, Database dbObjAfter, Long timestamp) {
+    this.server = server;
+    this.servicePrincipal = servicePrincipal;
+    this.db = dbObjBefore.getName();
+    this.timestamp = timestamp;
+    try {
+      this.dbObjBeforeJson = JSONMessageFactory.createDatabaseObjJson(dbObjBefore);
+      this.dbObjAfterJson = JSONMessageFactory.createDatabaseObjJson(dbObjAfter);
+    } catch (TException e) {
+      throw new IllegalArgumentException("Could not serialize: ", e);
+    }
+    checkValid();
+  }
+
+  @Override
+  public String getServer() {
+    return server;
+  }
+
+  @Override
+  public String getServicePrincipal() {
+    return servicePrincipal;
+  }
+
+  @Override
+  public String getDB() {
+    return db;
+  }
+
+  @Override
+  public Long getTimestamp() {
+    return timestamp;
+  }
+
+  @Override
+  public Database getDbObjBefore() throws Exception {
+    return (Database) JSONMessageFactory.getTObj(dbObjBeforeJson, Database.class);
+  }
+
+  @Override
+  public Database getDbObjAfter() throws Exception {
+    return (Database) JSONMessageFactory.getTObj(dbObjAfterJson, Database.class);
+  }
+
+  @Override
+  public String toString() {
+    try {
+      return JSONMessageDeserializer.mapper.writeValueAsString(this);
+    } catch (Exception e) {
+      throw new IllegalArgumentException("Could not serialize: ", e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
index 15fa4aa..f61138f 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializer.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage;
 import org.apache.hadoop.hive.metastore.messaging.AddPartitionMessage;
 import org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage;
 import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage;
+import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage;
 import org.apache.hadoop.hive.metastore.messaging.AlterIndexMessage;
 import org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage;
 import org.apache.hadoop.hive.metastore.messaging.AlterTableMessage;
@@ -63,7 +64,18 @@ public class JSONMessageDeserializer extends MessageDeserializer {
       return mapper.readValue(messageBody, JSONCreateDatabaseMessage.class);
     }
     catch (Exception exception) {
-      throw new IllegalArgumentException("Could not construct JSONCreateDatabaseMessage.", exception);
+      throw new IllegalArgumentException("Could not construct JSONCreateDatabaseMessage.",
+                                        exception);
+    }
+  }
+
+  @Override
+  public AlterDatabaseMessage getAlterDatabaseMessage(String messageBody) {
+    try {
+      return mapper.readValue(messageBody, JSONAlterDatabaseMessage.class);
+    } catch (Exception exception) {
+      throw new IllegalArgumentException("Could not construct JSONAlterDatabaseMessage.",
+                                        exception);
     }
   }
 
@@ -143,7 +155,8 @@ public class JSONMessageDeserializer extends MessageDeserializer {
       return mapper.readValue(messageBody, JSONCreateFunctionMessage.class);
     }
     catch (Exception exception) {
-      throw new IllegalArgumentException("Could not construct JSONCreateFunctionMessage.", exception);
+      throw new IllegalArgumentException("Could not construct JSONCreateFunctionMessage.",
+                                        exception);
     }
   }
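
The new message builder and deserializer can be exercised in isolation. A minimal round-trip sketch, assuming the default JSON message factory is configured (the class name, database name, and property values are illustrative):

    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage;
    import org.apache.hadoop.hive.metastore.messaging.MessageFactory;

    public class AlterDbMessageRoundTrip {
      public static void main(String[] args) throws Exception {
        Database before = new Database();
        before.setName("repl_src");
        Database after = new Database(before);          // Thrift deep-copy constructor
        after.putToParameters("blah", "newFoo");

        MessageFactory factory = MessageFactory.getInstance();
        AlterDatabaseMessage msg = factory.buildAlterDatabaseMessage(before, after);
        String payload = msg.toString();                // JSON, as stored in the notification log
        AlterDatabaseMessage readBack =
            factory.getDeserializer().getAlterDatabaseMessage(payload);
        System.out.println(readBack.getDbObjAfter().getParameters().get("blah"));
      }
    }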
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ad5bcb15/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageFactory.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageFactory.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageFactory.java
index 916a8e8..a9fe196 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageFactory.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageFactory.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.metastore.messaging.AddNotNullConstraintMessage;
 import org.apache.hadoop.hive.metastore.messaging.AddPartitionMessage;
 import org.apache.hadoop.hive.metastore.messaging.AddPrimaryKeyMessage;
 import org.apache.hadoop.hive.metastore.messaging.AddUniqueConstraintMessage;
+import org.apache.hadoop.hive.metastore.messaging.AlterDatabaseMessage;
 import org.apache.hadoop.hive.metastore.messaging.AlterIndexMessage;
 import org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage;
 import org.apache.hadoop.hive.metastore.messaging.AlterTableMessage;
@@ -102,6 +103,12 @@ public class JSONMessageFactory extends MessageFactory {
   }
 
   @Override
+  public AlterDatabaseMessage buildAlterDatabaseMessage(Database beforeDb, Database afterDb) {
+    return new JSONAlterDatabaseMessage(MS_SERVER_URL, MS_SERVICE_PRINCIPAL,
+                                        beforeDb, afterDb, now());
+  }
+
+  @Override
   public DropDatabaseMessage buildDropDatabaseMessage(Database db) {
     return new JSONDropDatabaseMessage(MS_SERVER_URL, MS_SERVICE_PRINCIPAL, db.getName(), now());
   }
@@ -113,7 +120,8 @@ public class JSONMessageFactory extends MessageFactory {
 
   @Override
   public AlterTableMessage buildAlterTableMessage(Table before, Table after, boolean isTruncateOp) {
-    return new JSONAlterTableMessage(MS_SERVER_URL, MS_SERVICE_PRINCIPAL, before, after, isTruncateOp, now());
+    return new JSONAlterTableMessage(MS_SERVER_URL, MS_SERVICE_PRINCIPAL,
+                                    before, after, isTruncateOp, now());
   }
 
   @Override
@@ -131,8 +139,8 @@ public class JSONMessageFactory extends MessageFactory {
   @Override
   public AlterPartitionMessage buildAlterPartitionMessage(Table table, Partition before,
       Partition after, boolean isTruncateOp) {
-    return new JSONAlterPartitionMessage(MS_SERVER_URL, MS_SERVICE_PRINCIPAL, table, before, after, isTruncateOp,
-        now());
+    return new JSONAlterPartitionMessage(MS_SERVER_URL, MS_SERVICE_PRINCIPAL,
+                                        table, before, after, isTruncateOp, now());
   }
 
   @Override
@@ -170,7 +178,8 @@ public class JSONMessageFactory extends MessageFactory {
   @Override
   public InsertMessage buildInsertMessage(Table tableObj, Partition partObj,
                                           boolean replace, Iterator<String> fileIter) {
-    return new JSONInsertMessage(MS_SERVER_URL, MS_SERVICE_PRINCIPAL, tableObj, partObj, replace, fileIter, now());
+    return new JSONInsertMessage(MS_SERVER_URL, MS_SERVICE_PRINCIPAL,
+                                tableObj, partObj, replace, fileIter, now());
   }
 
   @Override
@@ -242,6 +251,11 @@ public class JSONMessageFactory extends MessageFactory {
     return serializer.toString(notNullConstaintObj, "UTF-8");
   }
 
+  static String createDatabaseObjJson(Database dbObj) throws TException {
+    TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
+    return serializer.toString(dbObj, "UTF-8");
+  }
+
   static String createTableObjJson(Table tableObj) throws TException {
     TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
     return serializer.toString(tableObj, "UTF-8");