You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ab...@apache.org on 2019/11/29 21:35:49 UTC

[hive] branch master updated: HIVE-21954: QTest: support for running qtests on various metastore DBs (László Bodor reviewed by Zoltan Haindrich)

This is an automated email from the ASF dual-hosted git repository.

abstractdog pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new d645d82  HIVE-21954: QTest: support for running qtests on various metastore DBs (László Bodor reviewed by Zoltan Haindrich)
d645d82 is described below

commit d645d827d95de36175194407bd1e2f6725362aff
Author: László Bodor <bo...@gmail.com>
AuthorDate: Fri Nov 29 22:28:39 2019 +0100

    HIVE-21954: QTest: support for running qtests on various metastore DBs (László Bodor reviewed by Zoltan Haindrich)
    
    Signed-off-by: Laszlo Bodor <bo...@gmail.com>
---
 data/scripts/q_test_init.sql                       |  10 -
 .../apache/hadoop/hive/ql/TestLocationQueries.java |   1 +
 .../org/apache/hadoop/hive/ql/TestMTQueries.java   |   1 +
 itests/qtest/pom.xml                               |  47 +++-
 .../control/AbstractCoreBlobstoreCliDriver.java    |   8 +-
 .../apache/hadoop/hive/cli/control/CliAdapter.java |  55 ++++-
 .../hive/cli/control/CoreAccumuloCliDriver.java    |  15 +-
 .../hadoop/hive/cli/control/CoreBeeLineDriver.java |   6 +
 .../hadoop/hive/cli/control/CoreCliDriver.java     |  26 +-
 .../hive/cli/control/CoreCompareCliDriver.java     |  11 +-
 .../apache/hadoop/hive/cli/control/CoreDummy.java  |   6 +
 .../hive/cli/control/CoreHBaseCliDriver.java       |  11 +-
 .../cli/control/CoreHBaseNegativeCliDriver.java    |   6 +
 .../hadoop/hive/cli/control/CoreKuduCliDriver.java |  10 +-
 .../cli/control/CoreKuduNegativeCliDriver.java     |  10 +-
 .../hive/cli/control/CoreNegativeCliDriver.java    |  10 +-
 .../hadoop/hive/cli/control/CorePerfCliDriver.java |  37 ++-
 .../hadoop/hive/ql/QTestMetaStoreHandler.java      | 117 +++++++++
 .../hadoop/hive/ql/QTestSystemProperties.java      |   6 +
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java  |  34 ++-
 .../hadoop/hive/ql/parse/CoreParseNegative.java    |  14 +-
 pom.xml                                            |   1 +
 ql/src/test/queries/clientpositive/create_func1.q  |   3 +-
 .../clientpositive/partition_params_postgres.q     |   5 +
 .../test/results/clientpositive/create_func1.q.out |  16 ++
 .../test/results/clientpositive/llap/sysdb.q.out   |   1 -
 .../clientpositive/partition_params_postgres.q.out |  76 ++++++
 .../results/clientpositive/show_functions.q.out    |   2 -
 standalone-metastore/DEV-README                    |   6 +
 .../hadoop/hive/metastore/MetaStoreSchemaInfo.java |   6 +-
 .../tools/schematool/MetastoreSchemaTool.java      |  17 +-
 .../hive/metastore/dbinstall/DbInstallBase.java    | 237 +-----------------
 .../hive/metastore/dbinstall/ITestMysql.java       |  74 ++----
 .../hive/metastore/dbinstall/ITestOracle.java      |  75 ++----
 .../hive/metastore/dbinstall/ITestPostgres.java    |  74 ++----
 .../hive/metastore/dbinstall/ITestSqlServer.java   |  76 ++----
 .../DatabaseRule.java}                             | 265 +++++++++++----------
 .../hive/metastore/dbinstall/rules/Derby.java      | 103 ++++++++
 .../{ITestSqlServer.java => rules/Mssql.java}      |  38 +--
 .../{ITestMysql.java => rules/Mysql.java}          |  42 ++--
 .../{ITestOracle.java => rules/Oracle.java}        |  33 ++-
 .../{ITestPostgres.java => rules/Postgres.java}    |  43 ++--
 .../tools/schematool/TestMetastoreSchemaTool.java  |  10 +-
 43 files changed, 849 insertions(+), 795 deletions(-)

diff --git a/data/scripts/q_test_init.sql b/data/scripts/q_test_init.sql
index df05828..e69de29 100644
--- a/data/scripts/q_test_init.sql
+++ b/data/scripts/q_test_init.sql
@@ -1,10 +0,0 @@
-set hive.stats.dbclass=fs;
-
---
--- Function qtest_get_java_boolean
---
-DROP FUNCTION IF EXISTS qtest_get_java_boolean;
-CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean';
-
-reset;
-set hive.stats.dbclass=fs;
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
index eb3b935..383e35e 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestLocationQueries.java
@@ -122,6 +122,7 @@ public class TestLocationQueries extends BaseTestQueries {
 
     for (int i = 0; i < qfiles.length; i++) {
       qt[i] = new CheckResults(resDir, logDir, MiniClusterType.NONE, "parta");
+      qt[i].postInit();
       qt[i].newSession();
       qt[i].addFile(qfiles[i], false);
       qt[i].clearTestSideEffects();
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java
index 3e0cdac..d72c14a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestMTQueries.java
@@ -42,6 +42,7 @@ public class TestMTQueries extends BaseTestQueries {
     QTestUtil[] qts = QTestRunnerUtils.queryListRunnerSetup(qfiles, resDir, logDir, "q_test_init_src_with_stats.sql",
       "q_test_cleanup_src_with_stats.sql");
     for (QTestUtil util : qts) {
+      util.postInit();
       // derby fails creating multiple stats aggregator concurrently
       util.getConf().setBoolean("hive.exec.submitviachild", true);
       util.getConf().setBoolean("hive.exec.submit.local.task.via.child", true);
diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml
index 109f7a1..be8e377 100644
--- a/itests/qtest/pom.xml
+++ b/itests/qtest/pom.xml
@@ -401,6 +401,20 @@
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
     </dependency>
+    <dependency>
+      <!-- Note, this is LGPL.  But we're only using it in a test and not changing it, so I
+      believe we are fine. -->
+      <groupId>org.mariadb.jdbc</groupId>
+      <artifactId>mariadb-java-client</artifactId>
+      <version>${mariadb.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.postgresql</groupId>
+      <artifactId>postgresql</artifactId>
+      <version>9.3-1102-jdbc41</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <profiles>
     <profile>
@@ -437,6 +451,24 @@
         <artifactId>maven-antrun-plugin</artifactId>
         <executions>
           <execution>
+            <id>setup-metastore-scripts</id>
+            <phase>process-test-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <mkdir dir="${test.tmp.dir}/scripts/metastore" />
+                <copy todir="${test.tmp.dir}/scripts/metastore">
+                  <fileset dir="${basedir}/${hive.path.to.root}/metastore/scripts/"/>
+                </copy>
+                <copy todir="${test.tmp.dir}/scripts/metastore/upgrade" overwrite="true">
+                  <fileset dir="${basedir}/${hive.path.to.root}/standalone-metastore/metastore-server/src/main/sql/"/>
+                </copy>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
             <id>generate-tests-sources</id>
             <phase>generate-test-sources</phase>
             <configuration>
@@ -458,10 +490,6 @@
                 <mkdir dir="${project.build.directory}/qfile-results/contribnegative"/>
                 <mkdir dir="${project.build.directory}/qfile-results/contribclientpositive"/>
                 <mkdir dir="${project.build.directory}/qfile-results/contribclientnegative"/>
-
-
-
-
               </target>
             </configuration>
             <goals>
@@ -488,6 +516,17 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <additionalClasspathElements>
+            <additionalClasspathElement>${test.conf.dir}</additionalClasspathElement>
+            <additionalClasspathElement>${basedir}/${hive.path.to.root}/conf</additionalClasspathElement>
+            <additionalClasspathElement>${itest.jdbc.jars}</additionalClasspathElement>
+          </additionalClasspathElements>
+        </configuration>
+      </plugin>
     </plugins>
   </build>
 
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
index 50417e9..3c0ba14 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java
@@ -78,9 +78,6 @@ public abstract class AbstractCoreBlobstoreCliDriver extends CliAdapter {
 
       // do a one time initialization
       setupUniqueTestPath();
-      qt.newSession();
-      qt.cleanUp();
-      qt.createSources();
 
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
@@ -135,6 +132,11 @@ public abstract class AbstractCoreBlobstoreCliDriver extends CliAdapter {
     }
   }
 
+  @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
   private static String debugHint = "\nSee ./itests/hive-blobstore/target/tmp/log/hive.log, "
       + "or check ./itests/hive-blobstore/target/surefire-reports/ for specific test cases logs.";
 
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java
index 574a67f..fcfc790 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CliAdapter.java
@@ -22,9 +22,13 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.hadoop.hive.ql.QTestMetaStoreHandler;
+import org.apache.hadoop.hive.ql.QTestUtil;
 import org.junit.rules.TestRule;
 import org.junit.runner.Description;
 import org.junit.runners.model.Statement;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class adapts old vm test-executors to be executed in multiple instances
@@ -32,9 +36,13 @@ import org.junit.runners.model.Statement;
 public abstract class CliAdapter {
 
   protected final AbstractCliConfig cliConfig;
+  protected QTestMetaStoreHandler metaStoreHandler;
+  boolean firstTestNotYetRun = true; // this can protect class/test level logic from each other
+  private static final Logger LOG = LoggerFactory.getLogger(CliAdapter.class);
 
   public CliAdapter(AbstractCliConfig cliConfig) {
     this.cliConfig = cliConfig;
+    metaStoreHandler = new QTestMetaStoreHandler();
   }
 
   public final List<Object[]> getParameters() throws Exception {
@@ -68,11 +76,29 @@ public abstract class CliAdapter {
         return new Statement() {
           @Override
           public void evaluate() throws Throwable {
-            CliAdapter.this.beforeClass();
+            metaStoreHandler.setSystemProperties(); // for QTestUtil pre-initialization
+            CliAdapter.this.beforeClass(); // instantiating QTestUtil
+
+            LOG.debug("will initialize metastore database in class rule");
+            metaStoreHandler.getRule().before();
+            metaStoreHandler.getRule().install();
+
+            if (getQt() != null) {
+              metaStoreHandler.setMetaStoreConfiguration(getQt().getConf());
+              getQt().postInit();
+              getQt().newSession();
+              getQt().createSources();
+            }
+
+            CliAdapter.this.beforeClassSpec();
             try {
               base.evaluate();
             } finally {
               CliAdapter.this.shutdown();
+              if (getQt() != null && firstTestNotYetRun) {
+                LOG.debug("will destroy metastore database in class rule (if not derby)");
+                metaStoreHandler.afterTest(getQt());
+              }
             }
           }
         };
@@ -80,6 +106,10 @@ public abstract class CliAdapter {
     };
   }
 
+  // override this if e.g. a metastore dependent init logic is needed
+  protected void beforeClassSpec() {
+  }
+
   public final TestRule buildTestRule() {
     return new TestRule() {
       @Override
@@ -87,11 +117,28 @@ public abstract class CliAdapter {
         return new Statement() {
           @Override
           public void evaluate() throws Throwable {
+
+            if (getQt() != null && !firstTestNotYetRun) {
+              LOG.debug("will initialize metastore database in test rule");
+              metaStoreHandler.setMetaStoreConfiguration(getQt().getConf());
+              metaStoreHandler.beforeTest();
+            }
+            firstTestNotYetRun = false;
+
+            if (getQt() != null && CliAdapter.this.shouldRunCreateScriptBeforeEveryTest()){
+              // it's because some drivers still use init scripts, which can create a non-dataset table
+              // and get cleaned up after every test
+              getQt().createSources();
+            }
             CliAdapter.this.setUp();
             try {
               base.evaluate();
             } finally {
               CliAdapter.this.tearDown();
+              if (getQt() != null) {
+                LOG.debug("will destroy metastore database in test rule (if not derby)");
+                metaStoreHandler.afterTest(getQt());
+              }
             }
           }
         };
@@ -99,6 +146,12 @@ public abstract class CliAdapter {
     };
   }
 
+  protected boolean shouldRunCreateScriptBeforeEveryTest() {
+    return false;
+  }
+
+  protected abstract QTestUtil getQt();
+
   // HIVE-14444: pending refactor to push File forward
   public final void runTest(String name, File qfile) throws Exception {
     runTest(name, qfile.getName(), qfile.getAbsolutePath());
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
index 9a23ef8..e6c5e70 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java
@@ -25,6 +25,7 @@ import java.io.File;
 import org.apache.hadoop.hive.accumulo.AccumuloQTestUtil;
 import org.apache.hadoop.hive.accumulo.AccumuloTestSetup;
 import org.apache.hadoop.hive.ql.QTestProcessExecResult;
+import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
 import org.junit.After;
@@ -50,12 +51,6 @@ public class CoreAccumuloCliDriver extends CliAdapter {
     try {
       qt = new AccumuloQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR,
           new AccumuloTestSetup(), initScript, cleanupScript);
-
-      // do a one time initialization
-      qt.newSession();
-      qt.cleanUp();
-      qt.createSources();
-
     } catch (Exception e) {
       throw new RuntimeException("Unexpected exception in setUp", e);
     }
@@ -66,7 +61,6 @@ public class CoreAccumuloCliDriver extends CliAdapter {
   public void shutdown() {
     try {
       qt.shutdown();
-
     } catch (Exception e) {
       throw new RuntimeException("Unexpected exception in tearDown", e);
     }
@@ -77,7 +71,6 @@ public class CoreAccumuloCliDriver extends CliAdapter {
   public void setUp() {
     try {
       qt.newSession();
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -92,7 +85,6 @@ public class CoreAccumuloCliDriver extends CliAdapter {
     try {
       qt.clearPostTestEffects();
       qt.clearTestSideEffects();
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -102,6 +94,11 @@ public class CoreAccumuloCliDriver extends CliAdapter {
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String tname, String fname, String fpath) {
     long startTime = System.currentTimeMillis();
     try {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
index c8239a7..cd6bc34 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreBeeLineDriver.java
@@ -37,6 +37,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.ql.QTestProcessExecResult;
+import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.dataset.Dataset;
 import org.apache.hadoop.hive.ql.dataset.DatasetCollection;
 import org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler;
@@ -202,6 +203,11 @@ public class CoreBeeLineDriver extends CliAdapter {
     }
   }
 
+  @Override
+  protected QTestUtil getQt() {
+    return null;
+  }
+
   private void runTest(QFile qFile, List<Callable<Void>> preCommands) throws Exception {
     try (QFileBeeLineClient beeLineClient = clientBuilder.getClient(qFile.getLogFile())) {
       long startTime = System.currentTimeMillis();
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
index d06acfb..7a90309 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java
@@ -57,7 +57,7 @@ public class CoreCliDriver extends CliAdapter {
     LOG.info(message);
     System.err.println(message);
 
-    MiniClusterType miniMR =cliConfig.getClusterType();
+    MiniClusterType miniMR = cliConfig.getClusterType();
     String hiveConfDir = cliConfig.getHiveConfDir();
     String initScript = cliConfig.getInitScript();
     String cleanupScript = cliConfig.getCleanupScript();
@@ -79,25 +79,6 @@ public class CoreCliDriver extends CliAdapter {
                 .build());
         }
       }.invoke("QtestUtil instance created", LOG, true);
-
-      // do a one time initialization
-      new ElapsedTimeLoggingWrapper<Void>() {
-        @Override
-        public Void invokeInternal() throws Exception {
-          qt.newSession();
-          qt.cleanUp(); // I don't think this is neccessary...
-          return null;
-        }
-      }.invoke("Initialization cleanup done.", LOG, true);
-
-      new ElapsedTimeLoggingWrapper<Void>() {
-        @Override
-        public Void invokeInternal() throws Exception {
-          qt.createSources();
-          return null;
-        }
-      }.invoke("Initialization createSources done.", LOG, true);
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -168,6 +149,11 @@ public class CoreCliDriver extends CliAdapter {
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String testName, String fname, String fpath) {
     Stopwatch sw = Stopwatch.createStarted();
     boolean skipped = false;
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
index 62ea960..9a519ff 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java
@@ -64,12 +64,6 @@ public class CoreCompareCliDriver extends CliAdapter{
             .withCleanupScript(cleanupScript)
             .withLlapIo(false)
             .build());
-
-      // do a one time initialization
-      qt.newSession();
-      qt.cleanUp();
-      qt.createSources();
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -120,6 +114,11 @@ public class CoreCompareCliDriver extends CliAdapter{
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String tname, String fname, String fpath) {
     final String queryDirectory = cliConfig.getQueryDirectory();
 
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java
index 301b91e..9684565 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreDummy.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.fail;
 import java.util.List;
 
 import org.apache.hadoop.hive.ql.QFileVersionHandler;
+import org.apache.hadoop.hive.ql.QTestUtil;
 
 public class CoreDummy extends CliAdapter {
   QFileVersionHandler qvh = new QFileVersionHandler();
@@ -47,6 +48,11 @@ public class CoreDummy extends CliAdapter {
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return null;
+  }
+
+  @Override
   public void runTest(String name, String name2, String absolutePath) {
     List<String> versionFiles = qvh.getVersionFiles(cliConfig.getQueryDirectory(), name);
     if (versionFiles.size() < 2) {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
index 40545d8..5cad1e2 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java
@@ -25,6 +25,7 @@ import java.io.File;
 import org.apache.hadoop.hive.hbase.HBaseQTestUtil;
 import org.apache.hadoop.hive.hbase.HBaseTestSetup;
 import org.apache.hadoop.hive.ql.QTestProcessExecResult;
+import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
 import org.junit.After;
@@ -50,11 +51,6 @@ public class CoreHBaseCliDriver extends CliAdapter {
     try {
       qt = new HBaseQTestUtil(cliConfig.getResultsDir(), cliConfig.getLogDir(), miniMR,
           new HBaseTestSetup(), initScript, cleanupScript);
-
-      qt.newSession();
-      qt.cleanUp(null);
-      qt.createSources(null);
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -103,6 +99,11 @@ public class CoreHBaseCliDriver extends CliAdapter {
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String tname, String fname, String fpath) {
     long startTime = System.currentTimeMillis();
     try {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java
index 6094e6d..62672e6 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java
@@ -26,6 +26,7 @@ import java.io.File;
 import org.apache.hadoop.hive.hbase.HBaseQTestUtil;
 import org.apache.hadoop.hive.hbase.HBaseTestSetup;
 import org.apache.hadoop.hive.ql.QTestProcessExecResult;
+import org.apache.hadoop.hive.ql.QTestUtil;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
 import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType;
 import org.junit.After;
@@ -99,6 +100,11 @@ public class CoreHBaseNegativeCliDriver extends CliAdapter {
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String tname, String fname, String fpath) {
     long startTime = System.currentTimeMillis();
     try {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java
index 71134e7..ab9c36f 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduCliDriver.java
@@ -57,11 +57,6 @@ public class CoreKuduCliDriver extends CliAdapter {
           .withLlapIo(true)
           .withQTestSetup(new KuduTestSetup())
           .build());
-
-      // do a one time initialization
-      qt.newSession();
-      qt.cleanUp();
-      qt.createSources();
     } catch (Exception e) {
       throw new RuntimeException("Unexpected exception in setUp", e);
     }
@@ -107,6 +102,11 @@ public class CoreKuduCliDriver extends CliAdapter {
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String tname, String fname, String fpath) {
     long startTime = System.currentTimeMillis();
     try {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduNegativeCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduNegativeCliDriver.java
index 4f6988c..0fa3ac7 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduNegativeCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreKuduNegativeCliDriver.java
@@ -57,11 +57,6 @@ public class CoreKuduNegativeCliDriver extends CliAdapter {
           .withLlapIo(true)
           .withQTestSetup(new KuduTestSetup())
           .build());
-
-      // do a one time initialization
-      qt.newSession();
-      qt.cleanUp();
-      qt.createSources();
     } catch (Exception e) {
       throw new RuntimeException("Unexpected exception in setUp", e);
     }
@@ -107,6 +102,11 @@ public class CoreKuduNegativeCliDriver extends CliAdapter {
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String tname, String fname, String fpath) {
     long startTime = System.currentTimeMillis();
     try {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
index bb9e655..2536c1d 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java
@@ -59,11 +59,6 @@ public class CoreNegativeCliDriver extends CliAdapter{
             .withCleanupScript(cleanupScript)
             .withLlapIo(false)
             .build());
-      // do a one time initialization
-      qt.newSession();
-      qt.cleanUp();
-      qt.createSources();
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -115,6 +110,11 @@ public class CoreNegativeCliDriver extends CliAdapter{
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String tname, String fname, String fpath) throws Exception {
     long startTime = System.currentTimeMillis();
     try {
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
index 59c71f5..06d1e16 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java
@@ -32,6 +32,8 @@ import org.apache.hadoop.hive.ql.QTestMiniClusters.MiniClusterType;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
 import org.junit.After;
 import org.junit.AfterClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Strings;
 
@@ -45,6 +47,7 @@ import com.google.common.base.Strings;
  */
 public class CorePerfCliDriver extends CliAdapter {
 
+  private static final Logger LOG = LoggerFactory.getLogger(CorePerfCliDriver.class);
   private static QTestUtil qt;
 
   public CorePerfCliDriver(AbstractCliConfig testCliConfig) {
@@ -66,16 +69,6 @@ public class CorePerfCliDriver extends CliAdapter {
           .withOutDir(cliConfig.getResultsDir()).withLogDir(cliConfig.getLogDir())
           .withClusterType(miniMR).withConfDir(hiveConfDir).withInitScript(initScript)
           .withCleanupScript(cleanupScript).withLlapIo(false).build());
-
-      // do a one time initialization
-      qt.newSession();
-      qt.cleanUp();
-      qt.createSources();
-      // Manually modify the underlying metastore db to reflect statistics corresponding to
-      // the 30TB TPCDS scale set. This way the optimizer will generate plans for a 30 TB set.
-      MetaStoreDumpUtility.setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(qt.getConf(),
-          QTestSystemProperties.getTempDir());
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -86,6 +79,18 @@ public class CorePerfCliDriver extends CliAdapter {
   }
 
   @Override
+  protected void beforeClassSpec() {
+    overrideStatsInMetastore();
+  }
+
+  private void overrideStatsInMetastore() {
+    // Manually modify the underlying metastore db to reflect statistics corresponding to
+    // the 30TB TPCDS scale set. This way the optimizer will generate plans for a 30 TB set.
+    MetaStoreDumpUtility.setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(qt.getConf(),
+        QTestSystemProperties.getTempDir());
+  }
+
+  @Override
   @AfterClass
   public void shutdown() throws Exception {
     qt.shutdown();
@@ -95,7 +100,6 @@ public class CorePerfCliDriver extends CliAdapter {
   public void setUp() {
     try {
       qt.newSession();
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -109,7 +113,6 @@ public class CorePerfCliDriver extends CliAdapter {
   public void tearDown() {
     try {
       qt.clearPostTestEffects();
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -119,9 +122,15 @@ public class CorePerfCliDriver extends CliAdapter {
   }
 
   @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
+  @Override
   public void runTest(String name, String fname, String fpath) {
     long startTime = System.currentTimeMillis();
     try {
+      LOG.info("Begin query: " + fname);
       System.err.println("Begin query: " + fname);
 
       qt.addFile(fpath);
@@ -144,7 +153,9 @@ public class CorePerfCliDriver extends CliAdapter {
     }
 
     long elapsedTime = System.currentTimeMillis() - startTime;
-    System.err.println("Done query: " + fname + " elapsedTime=" + elapsedTime / 1000 + "s");
+    String message = "Done query: " + fname + " elapsedTime=" + elapsedTime / 1000 + "s";
+    LOG.info(message);
+    System.err.println(message);
     assertTrue("Test passed", true);
   }
 
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestMetaStoreHandler.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestMetaStoreHandler.java
new file mode 100644
index 0000000..b86d736
--- /dev/null
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestMetaStoreHandler.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Derby;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Mssql;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Mysql;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Oracle;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Postgres;
+import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * QTestMetaStoreHandler is responsible for wrapping the logic of handling different metastore
+ * databases in qtests.
+ */
+public class QTestMetaStoreHandler {
+  private static final Logger LOG = LoggerFactory.getLogger(QTestMetaStoreHandler.class);
+
+  private String metastoreType;
+  private DatabaseRule rule;
+
+  public QTestMetaStoreHandler() {
+    this.metastoreType = QTestSystemProperties.getMetaStoreDb() == null ? "derby"
+      : QTestSystemProperties.getMetaStoreDb();
+
+    this.rule = getDatabaseRule(metastoreType).setVerbose(false);
+
+    LOG.info("initialized metastore type '{}' for qtests", metastoreType);
+  }
+
+  public DatabaseRule getRule() {
+    return rule;
+  }
+
+  public boolean isDerby() {
+    return "derby".equalsIgnoreCase(metastoreType);
+  }
+
+  public QTestMetaStoreHandler setMetaStoreConfiguration(HiveConf conf) {
+    conf.setVar(ConfVars.METASTOREDBTYPE, getDbTypeConfString());
+
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECT_URL_KEY, rule.getJdbcUrl());
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECTION_DRIVER, rule.getJdbcDriver());
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CONNECTION_USER_NAME, rule.getHiveUser());
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.PWD, rule.getHivePassword());
+
+    LOG.info("set metastore connection to url: {}",
+        MetastoreConf.getVar(conf, MetastoreConf.ConfVars.CONNECT_URL_KEY));
+
+    return this;
+  }
+
+  private DatabaseRule getDatabaseRule(String metastoreType) {
+    switch (metastoreType) {
+    case "postgres":
+      return new Postgres();
+    case "oracle":
+      return new Oracle();
+    case "mysql":
+      return new Mysql();
+    case "mssql":
+    case "sqlserver":
+      return new Mssql();
+    default:
+      return new Derby();
+    }
+  }
+
+  private String getDbTypeConfString() { // maps to one of: "ORACLE", "MYSQL", "MSSQL", "POSTGRES", "DERBY"
+    return "sqlserver".equalsIgnoreCase(metastoreType) ? "MSSQL" : metastoreType.toUpperCase();
+  }
+
+  public void beforeTest() throws Exception {
+    getRule().before();
+    if (!isDerby()) { // derby schema is handled by the legacy QTestUtil/TxnDbUtil logic, no docker install needed
+      getRule().install();
+    }
+  }
+
+  public void afterTest(QTestUtil qt) throws Exception {
+    getRule().after();
+
+    // special qtest logic, which doesn't fit quite well into Derby.after()
+    if (isDerby()) {
+      TxnDbUtil.cleanDb(qt.getConf());
+      TxnDbUtil.prepDb(qt.getConf());
+    }
+  }
+
+  public void setSystemProperties() {
+    System.setProperty(MetastoreConf.ConfVars.CONNECT_URL_KEY.getVarname(), rule.getJdbcUrl());
+    System.setProperty(MetastoreConf.ConfVars.CONNECTION_DRIVER.getVarname(), rule.getJdbcDriver());
+    System.setProperty(MetastoreConf.ConfVars.CONNECTION_USER_NAME.getVarname(), rule.getHiveUser());
+    System.setProperty(MetastoreConf.ConfVars.PWD.getVarname(), rule.getHivePassword());
+  }
+}
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSystemProperties.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSystemProperties.java
index f82d17e..89b9c01 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSystemProperties.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestSystemProperties.java
@@ -28,6 +28,7 @@ public class QTestSystemProperties {
   private static final String SYS_PROP_VECTORIZATION_ENABLED = "test.vectorization.enabled";
   private static final String SYS_PROP_CHECK_SYNTAX = "test.check.syntax";
   private static final String SYS_PROP_FORCE_EXCLUSIONS = "test.force.exclusions";
+  private static final String SYS_PROP_METASTORE_DB = "test.metastore.db";
   private static final String SYS_PROP_BUILD_DIR = "build.dir"; // typically target
 
   public static String getTempDir() {
@@ -50,6 +51,11 @@ public class QTestSystemProperties {
     return System.getProperty(SYS_PROP_BUILD_DIR);
   }
 
+  public static String getMetaStoreDb() {
+    String db = System.getProperty(SYS_PROP_METASTORE_DB);
+    return db == null ? null : db.toLowerCase();
+  }
+
   public static boolean isVectorizationEnabled() {
     return isTrue(SYS_PROP_VECTORIZATION_ENABLED);
   }
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index a105c05..51d3672 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -52,7 +52,6 @@ import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
 import org.apache.hadoop.hive.ql.QTestMiniClusters.FsType;
 import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
 import org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler;
@@ -198,9 +197,8 @@ public class QTestUtil {
       System.out.println("Setting hive-site: " + HiveConf.getHiveSiteLocation());
     }
 
-    QueryState queryState = new QueryState.Builder().withHiveConf(new HiveConf(IDriver.class)).build();
-    conf = queryState.getConf();
-    sem = new SemanticAnalyzer(queryState);
+    conf = new HiveConf(IDriver.class);
+    setMetaStoreProperties();
 
     this.miniClusters.setup(testArgs, conf, getScriptsDir(), logDir);
 
@@ -219,11 +217,23 @@ public class QTestUtil {
     this.initScript = scriptsDir + File.separator + testArgs.getInitScript();
     this.cleanupScript = scriptsDir + File.separator + testArgs.getCleanupScript();
 
-    postInit();
     savedConf = new HiveConf(conf);
 
   }
 
+  private void setMetaStoreProperties() {
+    setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECT_URL_KEY);
+    setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECTION_DRIVER);
+    setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.CONNECTION_USER_NAME);
+    setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars.PWD);
+  }
+
+  private void setMetastoreConfPropertyFromSystemProperty(MetastoreConf.ConfVars var) {
+    if (System.getProperty(var.getVarname()) != null) {
+      MetastoreConf.setVar(conf, var, System.getProperty(var.getVarname()));
+    }
+  }
+
   private String getScriptsDir() {
     // Use the current directory if it is not specified
     String scriptsDir = conf.get("test.data.scripts");
@@ -418,8 +428,6 @@ public class QTestUtil {
     clearUDFsCreatedDuringTests();
     clearKeysCreatedInTests();
     StatsSources.clearGlobalStats();
-    TxnDbUtil.cleanDb(conf);
-    TxnDbUtil.prepDb(conf);
     dispatcher.afterTest(this);
   }
 
@@ -441,11 +449,13 @@ public class QTestUtil {
     }
     conf.setBoolean("hive.test.shutdown.phase", true);
 
-    clearTablesCreatedDuringTests();
-    clearUDFsCreatedDuringTests();
     clearKeysCreatedInTests();
 
-    cleanupFromFile();
+    String metastoreDb = QTestSystemProperties.getMetaStoreDb();
+    if (metastoreDb == null || "derby".equalsIgnoreCase(metastoreDb)) {
+      // otherwise, the docker container is already destroyed by this time
+      cleanupFromFile();
+    }
 
     // delete any contents in the warehouse dir
     Path p = new Path(testWarehouse);
@@ -520,9 +530,11 @@ public class QTestUtil {
     }
   }
 
-  private void postInit() throws Exception {
+  public void postInit() throws Exception {
     miniClusters.postInit(conf);
 
+    sem = new SemanticAnalyzer(new QueryState.Builder().withHiveConf(conf).build());
+
     testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
 
     db = Hive.get(conf);
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
index 9a136e2..ab00bfa 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java
@@ -63,11 +63,6 @@ public class CoreParseNegative extends CliAdapter{
             .withCleanupScript(cleanupScript)
             .withLlapIo(false)
             .build());
-
-      qt.newSession();
-      qt.cleanUp();
-      qt.createSources();
-
     } catch (Exception e) {
       System.err.println("Exception: " + e.getMessage());
       e.printStackTrace();
@@ -102,6 +97,15 @@ public class CoreParseNegative extends CliAdapter{
     }
   }
 
+  protected boolean shouldRunCreateScriptBeforeEveryTest() {
+    return true;
+  }
+
+  @Override
+  protected QTestUtil getQt() {
+    return qt;
+  }
+
   @Override
   public void runTest(String tname, String fname, String fpath) throws Exception {
     long startTime = System.currentTimeMillis();
diff --git a/pom.xml b/pom.xml
index 068d048..7ac071d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -187,6 +187,7 @@
     <libfb303.version>0.9.3</libfb303.version>
     <libthrift.version>0.9.3-1</libthrift.version>
     <log4j2.version>2.12.1</log4j2.version>
+    <mariadb.version>2.5.0</mariadb.version>
     <opencsv.version>2.3</opencsv.version>
     <orc.version>1.5.6</orc.version>
     <mockito-core.version>1.10.19</mockito-core.version>
diff --git a/ql/src/test/queries/clientpositive/create_func1.q b/ql/src/test/queries/clientpositive/create_func1.q
index 2c6acfc..1f3b3ff 100644
--- a/ql/src/test/queries/clientpositive/create_func1.q
+++ b/ql/src/test/queries/clientpositive/create_func1.q
@@ -1,6 +1,6 @@
 --! qt:dataset:src
 
--- qtest_get_java_boolean should already be created during test initialization
+CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean';
 select qtest_get_java_boolean('true'), qtest_get_java_boolean('false') from src limit 1;
 
 describe function extended qtest_get_java_boolean;
@@ -46,6 +46,7 @@ use mydb;
 -- unqualified function should resolve to one in mydb db
 select qtest_get_java_boolean('abc'), default.qtest_get_java_boolean('abc'), mydb.qtest_get_java_boolean('abc') from default.src limit 1;
 
+drop function default.qtest_get_java_boolean;
 drop function mydb.qtest_get_java_boolean;
 
 drop database mydb cascade;
diff --git a/ql/src/test/queries/clientpositive/partition_params_postgres.q b/ql/src/test/queries/clientpositive/partition_params_postgres.q
new file mode 100644
index 0000000..e2ad530
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/partition_params_postgres.q
@@ -0,0 +1,5 @@
+drop table if exists my_table;
+create external table my_table (col1 int, col3 int) partitioned by (col2 string) STORED AS TEXTFILE TBLPROPERTIES ("serialization.format" = "1");
+insert into my_table VALUES(11, 201, "F");
+SELECT * from my_table;
+describe formatted my_table;
diff --git a/ql/src/test/results/clientpositive/create_func1.q.out b/ql/src/test/results/clientpositive/create_func1.q.out
index 238d378..377b91a 100644
--- a/ql/src/test/results/clientpositive/create_func1.q.out
+++ b/ql/src/test/results/clientpositive/create_func1.q.out
@@ -1,3 +1,11 @@
+PREHOOK: query: CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: database:default
+PREHOOK: Output: default.qtest_get_java_boolean
+POSTHOOK: query: CREATE FUNCTION qtest_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default.qtest_get_java_boolean
 PREHOOK: query: select qtest_get_java_boolean('true'), qtest_get_java_boolean('false') from src limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -206,6 +214,14 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 ABC	NULL	ABC
+PREHOOK: query: drop function default.qtest_get_java_boolean
+PREHOOK: type: DROPFUNCTION
+PREHOOK: Output: database:default
+PREHOOK: Output: default.qtest_get_java_boolean
+POSTHOOK: query: drop function default.qtest_get_java_boolean
+POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default.qtest_get_java_boolean
 PREHOOK: query: drop function mydb.qtest_get_java_boolean
 PREHOOK: type: DROPFUNCTION
 PREHOOK: Output: database:mydb
diff --git a/ql/src/test/results/clientpositive/llap/sysdb.q.out b/ql/src/test/results/clientpositive/llap/sysdb.q.out
index 798b190..92f28d6 100644
--- a/ql/src/test/results/clientpositive/llap/sysdb.q.out
+++ b/ql/src/test/results/clientpositive/llap/sysdb.q.out
@@ -4911,7 +4911,6 @@ POSTHOOK: query: select func_name, func_type from funcs order by func_name, func
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@funcs
 #### A masked pattern was here ####
-qtest_get_java_boolean	1
 PREHOOK: query: select constraint_name from key_constraints order by constraint_name limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@key_constraints
diff --git a/ql/src/test/results/clientpositive/partition_params_postgres.q.out b/ql/src/test/results/clientpositive/partition_params_postgres.q.out
new file mode 100644
index 0000000..f536fd2
--- /dev/null
+++ b/ql/src/test/results/clientpositive/partition_params_postgres.q.out
@@ -0,0 +1,76 @@
+PREHOOK: query: drop table if exists my_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists my_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create external table my_table (col1 int, col3 int) partitioned by (col2 string) STORED AS TEXTFILE TBLPROPERTIES ("serialization.format" = "1")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@my_table
+POSTHOOK: query: create external table my_table (col1 int, col3 int) partitioned by (col2 string) STORED AS TEXTFILE TBLPROPERTIES ("serialization.format" = "1")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@my_table
+PREHOOK: query: insert into my_table VALUES(11, 201, "F")
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@my_table
+POSTHOOK: query: insert into my_table VALUES(11, 201, "F")
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@my_table@col2=F
+POSTHOOK: Lineage: my_table PARTITION(col2=F).col1 SCRIPT []
+POSTHOOK: Lineage: my_table PARTITION(col2=F).col3 SCRIPT []
+PREHOOK: query: SELECT * from my_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@my_table
+PREHOOK: Input: default@my_table@col2=F
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * from my_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@my_table
+POSTHOOK: Input: default@my_table@col2=F
+#### A masked pattern was here ####
+11	201	F
+PREHOOK: query: describe formatted my_table
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@my_table
+POSTHOOK: query: describe formatted my_table
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@my_table
+# col_name            	data_type           	comment             
+col1                	int                 	                    
+col3                	int                 	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+col2                	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	EXTERNAL_TABLE      	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	EXTERNAL            	TRUE                
+	bucketing_version   	2                   
+	discover.partitions 	true                
+	numFiles            	1                   
+	numPartitions       	1                   
+	numRows             	1                   
+	rawDataSize         	6                   
+	serialization.format	1                   
+	totalSize           	7                   
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
diff --git a/ql/src/test/results/clientpositive/show_functions.q.out b/ql/src/test/results/clientpositive/show_functions.q.out
index 9db6845..0453400 100644
--- a/ql/src/test/results/clientpositive/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/show_functions.q.out
@@ -102,7 +102,6 @@ day
 dayofmonth
 dayofweek
 decode
-default.qtest_get_java_boolean
 degrees
 dense_rank
 div
@@ -538,7 +537,6 @@ day
 dayofmonth
 dayofweek
 decode
-default.qtest_get_java_boolean
 degrees
 dense_rank
 div
diff --git a/standalone-metastore/DEV-README b/standalone-metastore/DEV-README
index 9c26117..ab5df26 100644
--- a/standalone-metastore/DEV-README
+++ b/standalone-metastore/DEV-README
@@ -45,6 +45,12 @@ To run just one test, do
 
 mvn verify -DskipITests=false -Dit.test=ITestMysql -Dtest=nosuch
 
+Supported databases for testing:
+-Dit.test=ITestMysql
+-Dit.test=ITestOracle
+-Dit.test=ITestPostgres
+-Dit.test=ITestSqlServer
+
 You can download the Oracle driver at 
 http://www.oracle.com/technetwork/database/features/jdbc/index-091264.html
 You should download Oracle 11g Release 1, ojdbc6.jar
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
index 49e19ad..d27323a 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreSchemaInfo.java
@@ -142,10 +142,10 @@ public class MetaStoreSchemaInfo implements IMetaStoreSchemaInfo {
   @Override
   public String getCreateUserScript() throws HiveMetaException {
     String createScript = CREATE_USER_PREFIX + "." + dbType + SQL_FILE_EXTENSION;
+    File scriptFile = new File(getMetaStoreScriptDir() + File.separatorChar + createScript);
     // check if the file exists
-    if (!(new File(getMetaStoreScriptDir() + File.separatorChar +
-        createScript).exists())) {
-      throw new HiveMetaException("Unable to find create user file, expected: " + createScript);
+    if (!scriptFile.exists()) {
+      throw new HiveMetaException("Unable to find create user file, expected: " + scriptFile.getAbsolutePath());
     }
     return createScript;
   }
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/schematool/MetastoreSchemaTool.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/schematool/MetastoreSchemaTool.java
index 27b0483..b58b0f0 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/schematool/MetastoreSchemaTool.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/schematool/MetastoreSchemaTool.java
@@ -196,8 +196,9 @@ public class MetastoreSchemaTool {
     return verbose;
   }
 
-  protected void setVerbose(boolean verbose) {
+  public MetastoreSchemaTool setVerbose(boolean verbose) {
     this.verbose = verbose;
+    return this;
   }
 
   protected void setDbOpts(String dbOpts) {
@@ -302,7 +303,9 @@ public class MetastoreSchemaTool {
 
   // Generate the beeline args per hive conf and execute the given script
   protected void execSql(String sqlScriptFile) throws IOException {
-    CommandBuilder builder = new CommandBuilder(conf, url, driver, userName, passWord, sqlScriptFile);
+    CommandBuilder builder =
+        new CommandBuilder(conf, url, driver, userName, passWord, sqlScriptFile)
+            .setVerbose(verbose);
 
     // run the script using SqlLine
     SqlLine sqlLine = new SqlLine();
@@ -351,6 +354,7 @@ public class MetastoreSchemaTool {
     protected final String sqlScriptFile;
     protected final String driver;
     protected final String url;
+    private boolean verbose = false;
 
     protected CommandBuilder(Configuration conf, String url, String driver, String userName,
                              String password, String sqlScriptFile) throws IOException {
@@ -363,12 +367,19 @@ public class MetastoreSchemaTool {
       this.sqlScriptFile = sqlScriptFile;
     }
 
+    public CommandBuilder setVerbose(boolean verbose) {
+      this.verbose = verbose;
+      return this;
+    }
+
     public String[] buildToRun() throws IOException {
       return argsWith(password);
     }
 
     public String buildToLog() throws IOException {
-      logScript();
+      if (verbose) {
+        logScript();
+      }
       return StringUtils.join(argsWith(PASSWD_MASK), " ");
     }
 
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java
index 6d8fd46..c1a1629 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java
@@ -17,249 +17,30 @@
  */
 package org.apache.hadoop.hive.metastore.dbinstall;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.tools.schematool.MetastoreSchemaTool;
-import org.junit.After;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule;
 import org.junit.Assert;
-import org.junit.Before;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
 
 public abstract class DbInstallBase {
-
-  private static final Logger LOG = LoggerFactory.getLogger(DbInstallBase.class);
-
-  private static final String HIVE_USER = "hiveuser";
-  protected static final String HIVE_DB = "hivedb";
   private static final String FIRST_VERSION = "1.2.0";
-  private static final int MAX_STARTUP_WAIT = 5 * 60 * 1000;
-
-  protected abstract String getDockerContainerName();
-  protected abstract String getDockerImageName();
-  protected abstract String[] getDockerAdditionalArgs();
-  protected abstract String getDbType();
-  protected abstract String getDbRootUser();
-  protected abstract String getDbRootPassword();
-  protected abstract String getJdbcDriver();
-  protected abstract String getJdbcUrl();
-  /**
-   * URL to use when connecting as root rather than Hive
-   * @return URL
-   */
-  protected abstract String getInitialJdbcUrl();
-
-  /**
-   * Determine if the docker container is ready to use.
-   * @param logOutput output of docker logs command
-   * @return true if ready, false otherwise
-   */
-  protected abstract boolean isContainerReady(String logOutput);
-  protected abstract String getHivePassword();
-
-  @Before
-  public void runDockerContainer() throws IOException, InterruptedException {
-    if (runCmdAndPrintStreams(buildRunCmd(), 600) != 0) {
-      throw new RuntimeException("Unable to start docker container");
-    }
-    long startTime = System.currentTimeMillis();
-    ProcessResults pr;
-    do {
-      Thread.sleep(5000);
-      pr = runCmd(buildLogCmd(), 5);
-      if (pr.rc != 0) throw new RuntimeException("Failed to get docker logs");
-    } while (startTime + MAX_STARTUP_WAIT >= System.currentTimeMillis() && !isContainerReady(pr.stdout));
-    if (startTime + MAX_STARTUP_WAIT < System.currentTimeMillis()) {
-      throw new RuntimeException("Container failed to be ready in " + MAX_STARTUP_WAIT/1000 +
-          " seconds");
-    }
-    MetastoreSchemaTool.setHomeDirForTesting();
-  }
-
-  @After
-  public void stopAndRmDockerContainer() throws IOException, InterruptedException {
-    if ("true".equalsIgnoreCase(System.getProperty("metastore.itest.no.stop.container"))) {
-      LOG.warn("Not stopping container " + getDockerContainerName() + " at user request, please " +
-          "be sure to shut it down before rerunning the test.");
-      return;
-    }
-    if (runCmdAndPrintStreams(buildStopCmd(), 60) != 0) {
-      throw new RuntimeException("Unable to stop docker container");
-    }
-    if (runCmdAndPrintStreams(buildRmCmd(), 15) != 0) {
-      throw new RuntimeException("Unable to remove docker container");
-    }
-  }
-
-  private static class ProcessResults {
-    final String stdout;
-    final String stderr;
-    final int rc;
-
-    public ProcessResults(String stdout, String stderr, int rc) {
-      this.stdout = stdout;
-      this.stderr = stderr;
-      this.rc = rc;
-    }
-  }
-
-  private ProcessResults runCmd(String[] cmd, long secondsToWait) throws IOException,
-      InterruptedException {
-    LOG.info("Going to run: " + StringUtils.join(cmd, " "));
-    Process proc = Runtime.getRuntime().exec(cmd);
-    if (!proc.waitFor(secondsToWait, TimeUnit.SECONDS)) {
-      throw new RuntimeException("Process " + cmd[0] + " failed to run in " + secondsToWait +
-          " seconds");
-    }
-    BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream()));
-    final StringBuilder lines = new StringBuilder();
-    reader.lines()
-        .forEach(s -> lines.append(s).append('\n'));
-
-    reader = new BufferedReader(new InputStreamReader(proc.getErrorStream()));
-    final StringBuilder errLines = new StringBuilder();
-    reader.lines()
-        .forEach(s -> errLines.append(s).append('\n'));
-    return new ProcessResults(lines.toString(), errLines.toString(), proc.exitValue());
-  }
-
-  private int runCmdAndPrintStreams(String[] cmd, long secondsToWait)
-      throws InterruptedException, IOException {
-    ProcessResults results = runCmd(cmd, secondsToWait);
-    LOG.info("Stdout from proc: " + results.stdout);
-    LOG.info("Stderr from proc: " + results.stderr);
-    return results.rc;
-  }
-
-  private int createUser() {
-    return new MetastoreSchemaTool().run(buildArray(
-        "-createUser",
-        "-dbType",
-        getDbType(),
-        "-userName",
-        getDbRootUser(),
-        "-passWord",
-        getDbRootPassword(),
-        "-hiveUser",
-        HIVE_USER,
-        "-hivePassword",
-        getHivePassword(),
-        "-hiveDb",
-        HIVE_DB,
-        "-url",
-        getInitialJdbcUrl(),
-        "-driver",
-        getJdbcDriver()
-    ));
-  }
-
-  private int installLatest() {
-    return new MetastoreSchemaTool().run(buildArray(
-        "-initSchema",
-        "-dbType",
-        getDbType(),
-        "-userName",
-        HIVE_USER,
-        "-passWord",
-        getHivePassword(),
-        "-url",
-        getJdbcUrl(),
-        "-driver",
-        getJdbcDriver()
-    ));
-  }
-
-  private int installAVersion(String version) {
-    return new MetastoreSchemaTool().run(buildArray(
-        "-initSchemaTo",
-        version,
-        "-dbType",
-        getDbType(),
-        "-userName",
-        HIVE_USER,
-        "-passWord",
-        getHivePassword(),
-        "-url",
-        getJdbcUrl(),
-        "-driver",
-        getJdbcDriver()
-    ));
-  }
-
-  private int upgradeToLatest() {
-    return new MetastoreSchemaTool().run(buildArray(
-        "-upgradeSchema",
-        "-dbType",
-        getDbType(),
-        "-userName",
-        HIVE_USER,
-        "-passWord",
-        getHivePassword(),
-        "-url",
-        getJdbcUrl(),
-        "-driver",
-        getJdbcDriver()
-    ));
-  }
-
-  protected String[] buildArray(String... strs) {
-    return strs;
-  }
 
   @Test
   public void install() {
-    Assert.assertEquals(0, createUser());
-    Assert.assertEquals(0, installLatest());
+    Assert.assertEquals(0, getRule().createUser());
+    Assert.assertEquals(0, getRule().installLatest());
   }
 
   @Test
   public void upgrade() throws HiveMetaException {
-    Assert.assertEquals(0, createUser());
-    Assert.assertEquals(0, installAVersion(FIRST_VERSION));
-    Assert.assertEquals(0, upgradeToLatest());
+    Assert.assertEquals(0, getRule().createUser());
+    Assert.assertEquals(0, getRule().installAVersion(FIRST_VERSION));
+    Assert.assertEquals(0, getRule().upgradeToLatest());
   }
 
-  private String[] buildRunCmd() {
-    List<String> cmd = new ArrayList<>(4 + getDockerAdditionalArgs().length);
-    cmd.add("docker");
-    cmd.add("run");
-    cmd.add("--name");
-    cmd.add(getDockerContainerName());
-    cmd.addAll(Arrays.asList(getDockerAdditionalArgs()));
-    cmd.add(getDockerImageName());
-    return cmd.toArray(new String[cmd.size()]);
-  }
-
-  private String[] buildStopCmd() {
-    return buildArray(
-        "docker",
-        "stop",
-        getDockerContainerName()
-    );
-  }
+  protected abstract DatabaseRule getRule();
 
-  private String[] buildRmCmd() {
-    return buildArray(
-        "docker",
-        "rm",
-        getDockerContainerName()
-    );
-  }
-
-  private String[] buildLogCmd() {
-    return buildArray(
-        "docker",
-        "logs",
-        getDockerContainerName()
-    );
+  protected String[] buildArray(String... strs) {
+    return strs;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java
index 9999d8d..1c36468 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,66 +17,20 @@
  */
 package org.apache.hadoop.hive.metastore.dbinstall;
 
-public class ITestMysql extends DbInstallBase {
-
-  @Override
-  protected String getDockerImageName() {
-    return "mariadb:5.5";
-  }
-
-  @Override
-  protected String[] getDockerAdditionalArgs() {
-    return buildArray(
-        "-p",
-        "3306:3306",
-        "-e",
-        "MYSQL_ROOT_PASSWORD=" + getDbRootPassword(),
-        "-d"
-    );
-  }
-
-  @Override
-  protected String getDbType() {
-    return "mysql";
-  }
-
-  @Override
-  protected String getDbRootUser() {
-    return "root";
-  }
-
-  @Override
-  protected String getDbRootPassword() {
-    return "its-a-secret";
-  }
+import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Mysql;
+import org.junit.Rule;
 
-  @Override
-  protected String getJdbcDriver() {
-    return org.mariadb.jdbc.Driver.class.getName();
-  }
-
-  @Override
-  protected String getJdbcUrl() {
-    return "jdbc:mysql://localhost:3306/" + HIVE_DB;
-  }
-
-  @Override
-  protected String getInitialJdbcUrl() {
-    return "jdbc:mysql://localhost:3306/";
-  }
-
-  @Override
-  protected boolean isContainerReady(String logOutput) {
-    return logOutput.contains("MySQL init process done. Ready for start up.");
-  }
+/**
+ * Mysql-specific DbInstallBase child test class.
+ */
+public class ITestMysql extends DbInstallBase {
 
-  @Override
-  protected String getDockerContainerName() {
-    return "metastore-test-mysql-install";
-  }
+  @Rule
+  public final DatabaseRule databaseRule = new Mysql();
 
   @Override
-  protected String getHivePassword() {
-    return "hivepassword";
+  protected DatabaseRule getRule() {
+    return databaseRule;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java
index 5b93e0f..b2de064 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,67 +17,20 @@
  */
 package org.apache.hadoop.hive.metastore.dbinstall;
 
-public class ITestOracle extends DbInstallBase {
-  @Override
-  protected String getDockerContainerName() {
-    return "metastore-test-oracle-install";
-  }
-
-  @Override
-  protected String getDockerImageName() {
-    return "orangehrm/oracle-xe-11g";
-  }
-
-  @Override
-  protected String[] getDockerAdditionalArgs() {
-    return buildArray(
-        "-p",
-        "1521:1521",
-        "-e",
-        "DEFAULT_SYS_PASS=" + getDbRootPassword(),
-        "-e",
-        "ORACLE_ALLOW_REMOTE=true",
-        "-d"
-    );
-  }
-
-  @Override
-  protected String getDbType() {
-    return "oracle";
-  }
-
-  @Override
-  protected String getDbRootUser() {
-    return "SYS as SYSDBA";
-  }
+import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Oracle;
+import org.junit.Rule;
 
-  @Override
-  protected String getDbRootPassword() {
-    return "oracle";
-  }
-
-  @Override
-  protected String getJdbcDriver() {
-    return "oracle.jdbc.OracleDriver";
-  }
-
-  @Override
-  protected String getJdbcUrl() {
-    return "jdbc:oracle:thin:@//localhost:1521/xe";
-  }
-
-  @Override
-  protected String getInitialJdbcUrl() {
-    return "jdbc:oracle:thin:@//localhost:1521/xe";
-  }
+/**
+ * Oracle-specific DbInstallBase child test class.
+ */
+public class ITestOracle extends DbInstallBase {
 
-  @Override
-  protected boolean isContainerReady(String logOutput) {
-    return logOutput.contains("Oracle started successfully!");
-  }
+  @Rule
+  public final DatabaseRule databaseRule = new Oracle();
 
   @Override
-  protected String getHivePassword() {
-    return "hivepassword";
+  protected DatabaseRule getRule() {
+    return databaseRule;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java
index 9151ac7..1e43d4f 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,66 +17,20 @@
  */
 package org.apache.hadoop.hive.metastore.dbinstall;
 
-public class ITestPostgres extends DbInstallBase {
-  @Override
-  protected String getDockerContainerName() {
-    return "metastore-test-postgres-install";
-  }
-
-  @Override
-  protected String getDockerImageName() {
-    return "postgres:9.3";
-  }
-
-  @Override
-  protected String[] getDockerAdditionalArgs() {
-    return buildArray(
-        "-p",
-        "5432:5432",
-        "-e",
-        "POSTGRES_PASSWORD=" + getDbRootPassword(),
-        "-d"
-
-    );
-  }
-
-  @Override
-  protected String getDbType() {
-    return "postgres";
-  }
-
-  @Override
-  protected String getDbRootUser() {
-    return "postgres";
-  }
+import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Postgres;
+import org.junit.Rule;
 
-  @Override
-  protected String getDbRootPassword() {
-    return "its-a-secret";
-  }
-
-  @Override
-  protected String getJdbcDriver() {
-    return org.postgresql.Driver.class.getName();
-  }
-
-  @Override
-  protected String getJdbcUrl() {
-    return "jdbc:postgresql://localhost:5432/" + HIVE_DB;
-  }
-
-  @Override
-  protected String getInitialJdbcUrl() {
-    return "jdbc:postgresql://localhost:5432/postgres";
-  }
+/**
+ * Postgres-specific DbInstallBase child test class.
+ */
+public class ITestPostgres extends DbInstallBase {
 
-  @Override
-  protected boolean isContainerReady(String logOutput) {
-    return logOutput.contains("database system is ready to accept connections");
-  }
+  @Rule
+  public final DatabaseRule databaseRule = new Postgres();
 
   @Override
-  protected String getHivePassword() {
-    return "hivepassword";
+  protected DatabaseRule getRule() {
+    return databaseRule;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java
index 67b6eee..6ec0e87 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,68 +17,20 @@
  */
 package org.apache.hadoop.hive.metastore.dbinstall;
 
-public class ITestSqlServer extends DbInstallBase {
-  @Override
-  protected String getDockerContainerName() {
-    return "metastore-test-mssql-install";
-  }
-
-  @Override
-  protected String getDockerImageName() {
-    return "microsoft/mssql-server-linux:2017-GA";
-  }
-
-  @Override
-  protected String[] getDockerAdditionalArgs() {
-    return buildArray(
-        "-p",
-        "1433:1433",
-        "-e",
-        "ACCEPT_EULA=Y",
-        "-e",
-        "SA_PASSWORD=" + getDbRootPassword(),
-        "-d"
-    );
-  }
-
-  @Override
-  protected String getDbType() {
-    return "mssql";
-  }
-
-  @Override
-  protected String getDbRootUser() {
-    return "SA";
-  }
+import org.apache.hadoop.hive.metastore.dbinstall.rules.DatabaseRule;
+import org.apache.hadoop.hive.metastore.dbinstall.rules.Mssql;
+import org.junit.Rule;
 
-  @Override
-  protected String getDbRootPassword() {
-    return "Its-a-s3cret";
-  }
-
-  @Override
-  protected String getJdbcDriver() {
-    return com.microsoft.sqlserver.jdbc.SQLServerDriver.class.getName();
-    //return "com.microsoft.sqlserver.jdbc.SQLServerDriver";
-  }
-
-  @Override
-  protected String getJdbcUrl() {
-    return "jdbc:sqlserver://localhost:1433;DatabaseName=" + HIVE_DB + ";";
-  }
-
-  @Override
-  protected String getInitialJdbcUrl() {
-    return  "jdbc:sqlserver://localhost:1433";
-  }
+/**
+ * Mssql-specific DbInstallBase child test class.
+ */
+public class ITestSqlServer extends DbInstallBase {
 
-  @Override
-  protected boolean isContainerReady(String logOutput) {
-    return logOutput.contains("Recovery is complete. This is an informational message only. No user action is required.");
-  }
+  @Rule
+  public final DatabaseRule databaseRule = new Mssql();
 
   @Override
-  protected String getHivePassword() {
-    return "h1vePassword!";
+  protected DatabaseRule getRule() {
+    return databaseRule;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/DatabaseRule.java
similarity index 65%
copy from standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java
copy to standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/DatabaseRule.java
index 6d8fd46..c1f49d8 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/DbInstallBase.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/DatabaseRule.java
@@ -6,26 +6,16 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.metastore.dbinstall;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.metastore.HiveMetaException;
-import org.apache.hadoop.hive.metastore.tools.schematool.MetastoreSchemaTool;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+package org.apache.hadoop.hive.metastore.dbinstall.rules;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -35,39 +25,84 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
-public abstract class DbInstallBase {
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.metastore.tools.schematool.MetastoreSchemaTool;
+import org.junit.rules.ExternalResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-  private static final Logger LOG = LoggerFactory.getLogger(DbInstallBase.class);
+/**
+ * Abstract JUnit TestRule for different RDMBS types.
+ */
+public abstract class DatabaseRule extends ExternalResource {
+  private static final Logger LOG = LoggerFactory.getLogger(DatabaseRule.class);
 
-  private static final String HIVE_USER = "hiveuser";
+  protected static final String HIVE_USER = "hiveuser";
+  // used in most of the RDBMS configs, except MSSQL
+  protected static final String HIVE_PASSWORD = "hivepassword";
   protected static final String HIVE_DB = "hivedb";
-  private static final String FIRST_VERSION = "1.2.0";
   private static final int MAX_STARTUP_WAIT = 5 * 60 * 1000;
 
-  protected abstract String getDockerContainerName();
-  protected abstract String getDockerImageName();
-  protected abstract String[] getDockerAdditionalArgs();
-  protected abstract String getDbType();
-  protected abstract String getDbRootUser();
-  protected abstract String getDbRootPassword();
-  protected abstract String getJdbcDriver();
-  protected abstract String getJdbcUrl();
+  public abstract String getHivePassword();
+
+  public abstract String getDockerImageName();
+
+  public abstract String[] getDockerAdditionalArgs();
+
+  public abstract String getDbType();
+
+  public abstract String getDbRootUser();
+
+  public abstract String getDbRootPassword();
+
+  public abstract String getJdbcDriver();
+
+  public abstract String getJdbcUrl();
+
+  private boolean verbose;
+
+  public DatabaseRule setVerbose(boolean verbose) {
+    this.verbose = verbose;
+    return this;
+  };
+
+  public String getDb() {
+    return HIVE_DB;
+  };
+
   /**
    * URL to use when connecting as root rather than Hive
+   *
    * @return URL
    */
-  protected abstract String getInitialJdbcUrl();
+  public abstract String getInitialJdbcUrl();
 
   /**
    * Determine if the docker container is ready to use.
+   *
    * @param logOutput output of docker logs command
    * @return true if ready, false otherwise
    */
-  protected abstract boolean isContainerReady(String logOutput);
-  protected abstract String getHivePassword();
+  public abstract boolean isContainerReady(String logOutput);
+
+  protected String[] buildArray(String... strs) {
+    return strs;
+  }
+
+  private static class ProcessResults {
+    final String stdout;
+    final String stderr;
+    final int rc;
+
+    public ProcessResults(String stdout, String stderr, int rc) {
+      this.stdout = stdout;
+      this.stderr = stderr;
+      this.rc = rc;
+    }
+  }
 
-  @Before
-  public void runDockerContainer() throws IOException, InterruptedException {
+  @Override
+  public void before() throws Exception { //runDockerContainer
     if (runCmdAndPrintStreams(buildRunCmd(), 600) != 0) {
       throw new RuntimeException("Unable to start docker container");
     }
@@ -76,7 +111,9 @@ public abstract class DbInstallBase {
     do {
       Thread.sleep(5000);
       pr = runCmd(buildLogCmd(), 5);
-      if (pr.rc != 0) throw new RuntimeException("Failed to get docker logs");
+      if (pr.rc != 0) {
+        throw new RuntimeException("Failed to get docker logs");
+      }
     } while (startTime + MAX_STARTUP_WAIT >= System.currentTimeMillis() && !isContainerReady(pr.stdout));
     if (startTime + MAX_STARTUP_WAIT < System.currentTimeMillis()) {
       throw new RuntimeException("Container failed to be ready in " + MAX_STARTUP_WAIT/1000 +
@@ -85,50 +122,44 @@ public abstract class DbInstallBase {
     MetastoreSchemaTool.setHomeDirForTesting();
   }
 
-  @After
-  public void stopAndRmDockerContainer() throws IOException, InterruptedException {
+  @Override
+  public void after() { // stopAndRmDockerContainer
     if ("true".equalsIgnoreCase(System.getProperty("metastore.itest.no.stop.container"))) {
-      LOG.warn("Not stopping container " + getDockerContainerName() + " at user request, please " +
-          "be sure to shut it down before rerunning the test.");
+      LOG.warn("Not stopping container " + getDockerContainerName() + " at user request, please "
+          + "be sure to shut it down before rerunning the test.");
       return;
     }
-    if (runCmdAndPrintStreams(buildStopCmd(), 60) != 0) {
-      throw new RuntimeException("Unable to stop docker container");
-    }
-    if (runCmdAndPrintStreams(buildRmCmd(), 15) != 0) {
-      throw new RuntimeException("Unable to remove docker container");
+    try {
+      if (runCmdAndPrintStreams(buildStopCmd(), 60) != 0) {
+        throw new RuntimeException("Unable to stop docker container");
+      }
+      if (runCmdAndPrintStreams(buildRmCmd(), 15) != 0) {
+        throw new RuntimeException("Unable to remove docker container");
+      }
+    } catch (InterruptedException | IOException e) {
+      e.printStackTrace();
     }
   }
 
-  private static class ProcessResults {
-    final String stdout;
-    final String stderr;
-    final int rc;
+  protected String getDockerContainerName(){
+    return String.format("metastore-test-%s-install", getDbType());
+  };
 
-    public ProcessResults(String stdout, String stderr, int rc) {
-      this.stdout = stdout;
-      this.stderr = stderr;
-      this.rc = rc;
-    }
-  }
-
-  private ProcessResults runCmd(String[] cmd, long secondsToWait) throws IOException,
-      InterruptedException {
+  private ProcessResults runCmd(String[] cmd, long secondsToWait)
+      throws IOException, InterruptedException {
     LOG.info("Going to run: " + StringUtils.join(cmd, " "));
     Process proc = Runtime.getRuntime().exec(cmd);
     if (!proc.waitFor(secondsToWait, TimeUnit.SECONDS)) {
-      throw new RuntimeException("Process " + cmd[0] + " failed to run in " + secondsToWait +
-          " seconds");
+      throw new RuntimeException(
+          "Process " + cmd[0] + " failed to run in " + secondsToWait + " seconds");
     }
     BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream()));
     final StringBuilder lines = new StringBuilder();
-    reader.lines()
-        .forEach(s -> lines.append(s).append('\n'));
+    reader.lines().forEach(s -> lines.append(s).append('\n'));
 
     reader = new BufferedReader(new InputStreamReader(proc.getErrorStream()));
     final StringBuilder errLines = new StringBuilder();
-    reader.lines()
-        .forEach(s -> errLines.append(s).append('\n'));
+    reader.lines().forEach(s -> errLines.append(s).append('\n'));
     return new ProcessResults(lines.toString(), errLines.toString(), proc.exitValue());
   }
 
@@ -140,8 +171,47 @@ public abstract class DbInstallBase {
     return results.rc;
   }
 
-  private int createUser() {
-    return new MetastoreSchemaTool().run(buildArray(
+  private String[] buildRunCmd() {
+    List<String> cmd = new ArrayList<>(4 + getDockerAdditionalArgs().length);
+    cmd.add("docker");
+    cmd.add("run");
+    cmd.add("--name");
+    cmd.add(getDockerContainerName());
+    cmd.addAll(Arrays.asList(getDockerAdditionalArgs()));
+    cmd.add(getDockerImageName());
+    return cmd.toArray(new String[cmd.size()]);
+  }
+
+  private String[] buildStopCmd() {
+    return buildArray(
+        "docker",
+        "stop",
+        getDockerContainerName()
+    );
+  }
+
+  private String[] buildRmCmd() {
+    return buildArray(
+        "docker",
+        "rm",
+        getDockerContainerName()
+    );
+  }
+
+  private String[] buildLogCmd() {
+    return buildArray(
+        "docker",
+        "logs",
+        getDockerContainerName()
+    );
+  }
+
+  public String getHiveUser(){
+    return HIVE_USER;
+  }
+
+  public int createUser() {
+    return new MetastoreSchemaTool().setVerbose(verbose).run(buildArray(
         "-createUser",
         "-dbType",
         getDbType(),
@@ -150,11 +220,11 @@ public abstract class DbInstallBase {
         "-passWord",
         getDbRootPassword(),
         "-hiveUser",
-        HIVE_USER,
+        getHiveUser(),
         "-hivePassword",
         getHivePassword(),
         "-hiveDb",
-        HIVE_DB,
+        getDb(),
         "-url",
         getInitialJdbcUrl(),
         "-driver",
@@ -162,13 +232,13 @@ public abstract class DbInstallBase {
     ));
   }
 
-  private int installLatest() {
-    return new MetastoreSchemaTool().run(buildArray(
+  public int installLatest() {
+    return new MetastoreSchemaTool().setVerbose(verbose).run(buildArray(
         "-initSchema",
         "-dbType",
         getDbType(),
         "-userName",
-        HIVE_USER,
+        getHiveUser(),
         "-passWord",
         getHivePassword(),
         "-url",
@@ -178,14 +248,14 @@ public abstract class DbInstallBase {
     ));
   }
 
-  private int installAVersion(String version) {
-    return new MetastoreSchemaTool().run(buildArray(
+  public int installAVersion(String version) {
+    return new MetastoreSchemaTool().setVerbose(verbose).run(buildArray(
         "-initSchemaTo",
         version,
         "-dbType",
         getDbType(),
         "-userName",
-        HIVE_USER,
+        getHiveUser(),
         "-passWord",
         getHivePassword(),
         "-url",
@@ -195,8 +265,8 @@ public abstract class DbInstallBase {
     ));
   }
 
-  private int upgradeToLatest() {
-    return new MetastoreSchemaTool().run(buildArray(
+  public int upgradeToLatest() {
+    return new MetastoreSchemaTool().setVerbose(verbose).run(buildArray(
         "-upgradeSchema",
         "-dbType",
         getDbType(),
@@ -211,55 +281,8 @@ public abstract class DbInstallBase {
     ));
   }
 
-  protected String[] buildArray(String... strs) {
-    return strs;
-  }
-
-  @Test
   public void install() {
-    Assert.assertEquals(0, createUser());
-    Assert.assertEquals(0, installLatest());
-  }
-
-  @Test
-  public void upgrade() throws HiveMetaException {
-    Assert.assertEquals(0, createUser());
-    Assert.assertEquals(0, installAVersion(FIRST_VERSION));
-    Assert.assertEquals(0, upgradeToLatest());
-  }
-
-  private String[] buildRunCmd() {
-    List<String> cmd = new ArrayList<>(4 + getDockerAdditionalArgs().length);
-    cmd.add("docker");
-    cmd.add("run");
-    cmd.add("--name");
-    cmd.add(getDockerContainerName());
-    cmd.addAll(Arrays.asList(getDockerAdditionalArgs()));
-    cmd.add(getDockerImageName());
-    return cmd.toArray(new String[cmd.size()]);
-  }
-
-  private String[] buildStopCmd() {
-    return buildArray(
-        "docker",
-        "stop",
-        getDockerContainerName()
-    );
-  }
-
-  private String[] buildRmCmd() {
-    return buildArray(
-        "docker",
-        "rm",
-        getDockerContainerName()
-    );
-  }
-
-  private String[] buildLogCmd() {
-    return buildArray(
-        "docker",
-        "logs",
-        getDockerContainerName()
-    );
+    createUser();
+    installLatest();
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Derby.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Derby.java
new file mode 100644
index 0000000..6415d7e
--- /dev/null
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Derby.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.dbinstall.rules;
+
+import org.apache.hadoop.hive.metastore.tools.schematool.MetastoreSchemaTool;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
+
+/**
+ * JUnit TestRule for Derby.
+ */
+public class Derby extends DatabaseRule {
+
+  @Override
+  public String getDockerImageName() {
+    return null;
+  }
+
+  @Override
+  public String[] getDockerAdditionalArgs() {
+    return null;
+  }
+
+  @Override
+  public String getDbType() {
+    return "derby";
+  }
+
+  @Override
+  public String getDbRootUser() {
+    return "APP";
+  }
+
+  @Override
+  public String getHiveUser() {
+    return "APP";
+  }
+
+  @Override
+  public String getDbRootPassword() {
+    return "mine";
+  }
+
+  @Override
+  public String getHivePassword() {
+    return "mine";
+  }
+
+  @Override
+  public String getJdbcDriver() {
+    return "org.apache.derby.jdbc.EmbeddedDriver";
+  }
+
+  @Override
+  public String getJdbcUrl() {
+    return String.format("jdbc:derby:memory:%s/%s;create=true", System.getProperty("test.tmp.dir"),
+        getDb());
+  }
+
+  @Override
+  public String getInitialJdbcUrl() {
+    return String.format("jdbc:derby:memory:%s/%s;create=true", System.getProperty("test.tmp.dir"),
+        getDb());
+  }
+
+  public String getDb() {
+    return MetaStoreServerUtils.JUNIT_DATABASE_PREFIX;
+  };
+
+  @Override
+  public boolean isContainerReady(String logOutput) {
+    return true;
+  }
+
+  @Override
+  public void before() throws Exception {
+    MetastoreSchemaTool.setHomeDirForTesting();
+  }
+
+  @Override
+  public void after() {
+    // no-op, no need for docker container for derby
+  }
+
+  @Override
+  public int createUser() {
+    return 0; // no-op
+  }
+}
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mssql.java
similarity index 63%
copy from standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java
copy to standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mssql.java
index 67b6eee..f999481 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestSqlServer.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mssql.java
@@ -15,21 +15,20 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.metastore.dbinstall;
+package org.apache.hadoop.hive.metastore.dbinstall.rules;
 
-public class ITestSqlServer extends DbInstallBase {
-  @Override
-  protected String getDockerContainerName() {
-    return "metastore-test-mssql-install";
-  }
+/**
+ * JUnit TestRule for Mssql.
+ */
+public class Mssql extends DatabaseRule {
 
   @Override
-  protected String getDockerImageName() {
+  public String getDockerImageName() {
     return "microsoft/mssql-server-linux:2017-GA";
   }
 
   @Override
-  protected String[] getDockerAdditionalArgs() {
+  public String[] getDockerAdditionalArgs() {
     return buildArray(
         "-p",
         "1433:1433",
@@ -42,43 +41,44 @@ public class ITestSqlServer extends DbInstallBase {
   }
 
   @Override
-  protected String getDbType() {
+  public String getDbType() {
     return "mssql";
   }
 
   @Override
-  protected String getDbRootUser() {
+  public String getDbRootUser() {
     return "SA";
   }
 
   @Override
-  protected String getDbRootPassword() {
+  public String getDbRootPassword() {
     return "Its-a-s3cret";
   }
 
   @Override
-  protected String getJdbcDriver() {
+  public String getJdbcDriver() {
     return com.microsoft.sqlserver.jdbc.SQLServerDriver.class.getName();
-    //return "com.microsoft.sqlserver.jdbc.SQLServerDriver";
+    // return "com.microsoft.sqlserver.jdbc.SQLServerDriver";
   }
 
   @Override
-  protected String getJdbcUrl() {
+  public String getJdbcUrl() {
     return "jdbc:sqlserver://localhost:1433;DatabaseName=" + HIVE_DB + ";";
   }
 
   @Override
-  protected String getInitialJdbcUrl() {
-    return  "jdbc:sqlserver://localhost:1433";
+  public String getInitialJdbcUrl() {
+    return "jdbc:sqlserver://localhost:1433";
   }
 
   @Override
-  protected boolean isContainerReady(String logOutput) {
-    return logOutput.contains("Recovery is complete. This is an informational message only. No user action is required.");
+  public boolean isContainerReady(String logOutput) {
+    return logOutput.contains(
+        "Recovery is complete. This is an informational message only. No user action is required.");
   }
 
   @Override
-  protected String getHivePassword() {
+  public String getHivePassword() {
     return "h1vePassword!";
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mysql.java
similarity index 62%
copy from standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java
copy to standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mysql.java
index 9999d8d..c537d95 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestMysql.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Mysql.java
@@ -15,68 +15,60 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.metastore.dbinstall;
+package org.apache.hadoop.hive.metastore.dbinstall.rules;
 
-public class ITestMysql extends DbInstallBase {
+/**
+ * JUnit TestRule for MySql.
+ */
+public class Mysql extends DatabaseRule {
 
   @Override
-  protected String getDockerImageName() {
+  public String getDockerImageName() {
     return "mariadb:5.5";
   }
 
   @Override
-  protected String[] getDockerAdditionalArgs() {
-    return buildArray(
-        "-p",
-        "3306:3306",
-        "-e",
-        "MYSQL_ROOT_PASSWORD=" + getDbRootPassword(),
-        "-d"
-    );
+  public String[] getDockerAdditionalArgs() {
+    return buildArray("-p", "3306:3306", "-e", "MYSQL_ROOT_PASSWORD=" + getDbRootPassword(), "-d");
   }
 
   @Override
-  protected String getDbType() {
+  public String getDbType() {
     return "mysql";
   }
 
   @Override
-  protected String getDbRootUser() {
+  public String getDbRootUser() {
     return "root";
   }
 
   @Override
-  protected String getDbRootPassword() {
+  public String getDbRootPassword() {
     return "its-a-secret";
   }
 
   @Override
-  protected String getJdbcDriver() {
+  public String getJdbcDriver() {
     return org.mariadb.jdbc.Driver.class.getName();
   }
 
   @Override
-  protected String getJdbcUrl() {
+  public String getJdbcUrl() {
     return "jdbc:mysql://localhost:3306/" + HIVE_DB;
   }
 
   @Override
-  protected String getInitialJdbcUrl() {
+  public String getInitialJdbcUrl() {
     return "jdbc:mysql://localhost:3306/";
   }
 
   @Override
-  protected boolean isContainerReady(String logOutput) {
+  public boolean isContainerReady(String logOutput) {
     return logOutput.contains("MySQL init process done. Ready for start up.");
   }
 
   @Override
-  protected String getDockerContainerName() {
-    return "metastore-test-mysql-install";
-  }
-
-  @Override
-  protected String getHivePassword() {
-    return "hivepassword";
+  public String getHivePassword() {
+    return HIVE_PASSWORD;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Oracle.java
similarity index 69%
copy from standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java
copy to standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Oracle.java
index 5b93e0f..0b070e1 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestOracle.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Oracle.java
@@ -15,21 +15,20 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.metastore.dbinstall;
+package org.apache.hadoop.hive.metastore.dbinstall.rules;
 
-public class ITestOracle extends DbInstallBase {
-  @Override
-  protected String getDockerContainerName() {
-    return "metastore-test-oracle-install";
-  }
+/**
+ * JUnit TestRule for Oracle.
+ */
+public class Oracle extends DatabaseRule {
 
   @Override
-  protected String getDockerImageName() {
+  public String getDockerImageName() {
     return "orangehrm/oracle-xe-11g";
   }
 
   @Override
-  protected String[] getDockerAdditionalArgs() {
+  public String[] getDockerAdditionalArgs() {
     return buildArray(
         "-p",
         "1521:1521",
@@ -42,42 +41,42 @@ public class ITestOracle extends DbInstallBase {
   }
 
   @Override
-  protected String getDbType() {
+  public String getDbType() {
     return "oracle";
   }
 
   @Override
-  protected String getDbRootUser() {
+  public String getDbRootUser() {
     return "SYS as SYSDBA";
   }
 
   @Override
-  protected String getDbRootPassword() {
+  public String getDbRootPassword() {
     return "oracle";
   }
 
   @Override
-  protected String getJdbcDriver() {
+  public String getJdbcDriver() {
     return "oracle.jdbc.OracleDriver";
   }
 
   @Override
-  protected String getJdbcUrl() {
+  public String getJdbcUrl() {
     return "jdbc:oracle:thin:@//localhost:1521/xe";
   }
 
   @Override
-  protected String getInitialJdbcUrl() {
+  public String getInitialJdbcUrl() {
     return "jdbc:oracle:thin:@//localhost:1521/xe";
   }
 
   @Override
-  protected boolean isContainerReady(String logOutput) {
+  public boolean isContainerReady(String logOutput) {
     return logOutput.contains("Oracle started successfully!");
   }
 
   @Override
-  protected String getHivePassword() {
-    return "hivepassword";
+  public String getHivePassword() {
+    return HIVE_PASSWORD;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Postgres.java
similarity index 62%
copy from standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java
copy to standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Postgres.java
index 9151ac7..5840095 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/ITestPostgres.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/dbinstall/rules/Postgres.java
@@ -15,68 +15,59 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.metastore.dbinstall;
-
-public class ITestPostgres extends DbInstallBase {
-  @Override
-  protected String getDockerContainerName() {
-    return "metastore-test-postgres-install";
-  }
+package org.apache.hadoop.hive.metastore.dbinstall.rules;
 
+/**
+ * JUnit TestRule for Postgres.
+ */
+public class Postgres extends DatabaseRule {
   @Override
-  protected String getDockerImageName() {
+  public String getDockerImageName() {
     return "postgres:9.3";
   }
 
   @Override
-  protected String[] getDockerAdditionalArgs() {
-    return buildArray(
-        "-p",
-        "5432:5432",
-        "-e",
-        "POSTGRES_PASSWORD=" + getDbRootPassword(),
-        "-d"
-
-    );
+  public String[] getDockerAdditionalArgs() {
+    return buildArray("-p", "5432:5432", "-e", "POSTGRES_PASSWORD=" + getDbRootPassword(), "-d");
   }
 
   @Override
-  protected String getDbType() {
+  public String getDbType() {
     return "postgres";
   }
 
   @Override
-  protected String getDbRootUser() {
+  public String getDbRootUser() {
     return "postgres";
   }
 
   @Override
-  protected String getDbRootPassword() {
+  public String getDbRootPassword() {
     return "its-a-secret";
   }
 
   @Override
-  protected String getJdbcDriver() {
+  public String getJdbcDriver() {
     return org.postgresql.Driver.class.getName();
   }
 
   @Override
-  protected String getJdbcUrl() {
+  public String getJdbcUrl() {
     return "jdbc:postgresql://localhost:5432/" + HIVE_DB;
   }
 
   @Override
-  protected String getInitialJdbcUrl() {
+  public String getInitialJdbcUrl() {
     return "jdbc:postgresql://localhost:5432/postgres";
   }
 
   @Override
-  protected boolean isContainerReady(String logOutput) {
+  public boolean isContainerReady(String logOutput) {
     return logOutput.contains("database system is ready to accept connections");
   }
 
   @Override
-  protected String getHivePassword() {
-    return "hivepassword";
+  public String getHivePassword() {
+    return HIVE_PASSWORD;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestMetastoreSchemaTool.java b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestMetastoreSchemaTool.java
index b4a0844..a93e23b 100644
--- a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestMetastoreSchemaTool.java
+++ b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/tools/schematool/TestMetastoreSchemaTool.java
@@ -40,7 +40,7 @@ public class TestMetastoreSchemaTool {
   @Mock
   private Configuration conf;
   private MetastoreSchemaTool.CommandBuilder builder;
-  private String pasword = "reallySimplePassword";
+  private String password = "reallySimplePassword";
 
   @Before
   public void setup() throws IOException {
@@ -49,7 +49,9 @@ public class TestMetastoreSchemaTool {
     if (!file.exists()) {
       file.createNewFile();
     }
-    builder = new MetastoreSchemaTool.CommandBuilder(conf, null, null, "testUser", pasword, scriptFile);
+    builder =
+        new MetastoreSchemaTool.CommandBuilder(conf, null, null, "testUser", password, scriptFile)
+            .setVerbose(false);
   }
 
   @After
@@ -59,12 +61,12 @@ public class TestMetastoreSchemaTool {
 
   @Test
   public void shouldReturnStrippedPassword() throws IOException {
-    assertFalse(builder.buildToLog().contains(pasword));
+    assertFalse(builder.buildToLog().contains(password));
   }
 
   @Test
   public void shouldReturnActualPassword() throws IOException {
     String[] strings = builder.buildToRun();
-    assertTrue(Arrays.asList(strings).contains(pasword));
+    assertTrue(Arrays.asList(strings).contains(password));
   }
 }