Posted to commits@drill.apache.org by so...@apache.org on 2019/01/11 17:03:38 UTC

[drill] branch master updated (fc1eacd -> 7be4c53)

This is an automated email from the ASF dual-hosted git repository.

sorabh pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from fc1eacd  Changes following second code review
     new 4b66226  DRILL-6918: Skip ensureAtLeastOneField when there are no records
     new e76d1e2  DRILL-6928: Update description for exec.query.return_result_set_for_ddl option to reflect it affects JDBC connections only closes #1601
     new eb02999  DRILL-6955: storage-jdbc tests improvements
     new 7be4c53  DRILL-6954: Move commons libs used in UDFs module to the dependency management closes #1604

The 4 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 contrib/storage-jdbc/pom.xml                       | 241 +--------------------
 ...ithDerbyIT.java => TestJdbcPluginWithH2IT.java} | 144 +++++++++---
 .../exec/store/jdbc/TestJdbcPluginWithMySQLIT.java | 117 ++++++++--
 .../test/resources/bootstrap-storage-plugins.json  |  25 ---
 .../{derby-test-data.sql => h2-test-data.sql}      |   4 +-
 .../src/test/resources/mysql-test-data-linux.sql   |   2 -
 .../src/test/resources/mysql-test-data.sql         |   3 -
 .../drill/exec/store/kafka/KafkaRecordReader.java  |   4 +-
 contrib/udfs/pom.xml                               |  11 +-
 .../java/org/apache/drill/exec/ExecConstants.java  |   5 +-
 pom.xml                                            |  34 ++-
 11 files changed, 260 insertions(+), 330 deletions(-)
 rename contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/{TestJdbcPluginWithDerbyIT.java => TestJdbcPluginWithH2IT.java} (51%)
 delete mode 100755 contrib/storage-jdbc/src/test/resources/bootstrap-storage-plugins.json
 rename contrib/storage-jdbc/src/test/resources/{derby-test-data.sql => h2-test-data.sql} (96%)


[drill] 04/04: DRILL-6954: Move commons libs used in UDFs module to the dependency management closes #1604

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sorabh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 7be4c53b31c543d2d43651664d4d85b265fbfec7
Author: Arina Ielchiieva <ar...@gmail.com>
AuthorDate: Tue Jan 8 14:52:39 2019 +0200

    DRILL-6954: Move commons libs used in UDFs module to the dependency management
    closes #1604
---
 contrib/udfs/pom.xml | 11 +----------
 pom.xml              | 34 +++++++++++++++++++++++++++++-----
 2 files changed, 30 insertions(+), 15 deletions(-)

diff --git a/contrib/udfs/pom.xml b/contrib/udfs/pom.xml
index 740c06e..c0bc4e5 100644
--- a/contrib/udfs/pom.xml
+++ b/contrib/udfs/pom.xml
@@ -40,31 +40,22 @@
     <dependency>
       <groupId>commons-net</groupId>
       <artifactId>commons-net</artifactId>
-      <version>3.6</version>
     </dependency>
 
     <dependency>
       <groupId>commons-validator</groupId>
       <artifactId>commons-validator</artifactId>
-      <version>1.6</version>
-      <exclusions>
-        <exclusion>
-          <groupId>commons-logging</groupId>
-          <artifactId>commons-logging</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
 
     <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-text</artifactId>
-      <version>1.4</version>
     </dependency>
 
     <dependency>
       <groupId>com.esri.geometry</groupId>
       <artifactId>esri-geometry-api</artifactId>
-      <version>2.2.0</version>
+      <version>2.2.2</version>
     </dependency>
 
     <dependency>
diff --git a/pom.xml b/pom.xml
index f0ecf80..a2e0948 100644
--- a/pom.xml
+++ b/pom.xml
@@ -94,6 +94,9 @@
     <docker.repository>drill/apache-drill</docker.repository>
     <antlr.version>4.7.1</antlr.version>
     <lowestMavenVersion>3.3.3</lowestMavenVersion>
+    <commons.net.version>3.6</commons.net.version>
+    <commons.validator.version>1.6</commons.validator.version>
+    <commons.text.version>1.6</commons.text.version>
   </properties>
 
   <scm>
@@ -1702,6 +1705,32 @@
         <artifactId>antlr4-runtime</artifactId>
         <version>${antlr.version}</version>
       </dependency>
+      <dependency>
+        <groupId>sqlline</groupId>
+        <artifactId>sqlline</artifactId>
+        <version>${sqlline.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>commons-net</groupId>
+        <artifactId>commons-net</artifactId>
+        <version>${commons.net.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>commons-validator</groupId>
+        <artifactId>commons-validator</artifactId>
+        <version>${commons.validator.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.commons</groupId>
+        <artifactId>commons-text</artifactId>
+        <version>${commons.text.version}</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
@@ -2350,11 +2379,6 @@
             <artifactId>xalan</artifactId>
             <version>2.7.1</version>
           </dependency>
-          <dependency>
-            <groupId>sqlline</groupId>
-            <artifactId>sqlline</artifactId>
-            <version>${sqlline.version}</version>
-          </dependency>
 
           <dependency>
             <groupId>org.apache.calcite</groupId>


[drill] 01/04: DRILL-6918: Skip ensureAtLeastOneField when there are no records

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sorabh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 4b66226dc7e33d9173e6e714d5cf85beea1e4e46
Author: Abhishek Ravi <ab...@gmail.com>
AuthorDate: Fri Jan 4 00:04:32 2019 -0800

    DRILL-6918: Skip ensureAtLeastOneField when there are no records
    
    If none of the projected / filter columns exist in the vector, ensureAtLeastOneField (or the Scan operator) adds at least one field as a nullable integer (or a nullable varchar if `allTextmode` is enabled).
    
    The downstream Filter operator would then go on to fail with `NumberFormatException` because it tries to convert empty fields to integers.
    
    Since ensureAtLeastOneField is called after reading all the messages in a batch, it can be skipped if the batch is empty.
    
    closes #1595
---
 .../java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java     | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java
index 9559c3d..62e588c 100644
--- a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java
+++ b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaRecordReader.java
@@ -119,7 +119,9 @@ public class KafkaRecordReader extends AbstractRecordReader {
         }
       }
 
-      messageReader.ensureAtLeastOneField();
+      if (currentMessageCount > 0) {
+        messageReader.ensureAtLeastOneField();
+      }
       writer.setValueCount(currentMessageCount);
       logger.debug("Took {} ms to process {} records.", watch.elapsed(TimeUnit.MILLISECONDS), currentMessageCount);
       logger.debug("Last offset consumed for {}:{} is {}", subScanSpec.getTopicName(), subScanSpec.getPartitionId(),


[drill] 02/04: DRILL-6928: Update description for exec.query.return_result_set_for_ddl option to reflect it affects JDBC connections only closes #1601

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sorabh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit e76d1e2702e87d3ab01feab9ffe231f87af957ab
Author: Bohdan Kazydub <bo...@gmail.com>
AuthorDate: Tue Jan 8 14:17:17 2019 +0200

    DRILL-6928: Update description for exec.query.return_result_set_for_ddl option to reflect it affects JDBC connections only
    closes #1601
---
 .../java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index a0376a4..811c479 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -902,6 +902,7 @@ public final class ExecConstants {
 
   public static final String RETURN_RESULT_SET_FOR_DDL = "exec.query.return_result_set_for_ddl";
   public static final BooleanValidator RETURN_RESULT_SET_FOR_DDL_VALIDATOR = new BooleanValidator(RETURN_RESULT_SET_FOR_DDL,
-      new OptionDescription("Controls whether to return result set for CREATE TABLE / VIEW, DROP TABLE / VIEW, SET, USE etc. queries. " +
-          "If set to false affected rows count will be returned instead and result set will be null. Default is true. (Drill 1.15+)"));
+      new OptionDescription("Controls whether to return result set for CREATE TABLE / VIEW / FUNCTION, DROP TABLE / VIEW / FUNCTION, " +
+          "SET, USE, REFRESH METADATA TABLE queries. If set to false affected rows count will be returned instead and result set will be null. " +
+          "Affects JDBC connections only. Default is true. (Drill 1.15+)"));
 }
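
A minimal JDBC sketch, not part of this commit, of what the updated description means in practice: when the option is disabled for a session, DDL statements report an affected-rows count over JDBC instead of returning a result set. The connection URL, workspace, and view name below are illustrative assumptions.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class ReturnResultSetForDdlExample {
      public static void main(String[] args) throws Exception {
        // Assumes the Drill JDBC driver is on the classpath and a local Drillbit is running.
        try (Connection conn = DriverManager.getConnection("jdbc:drill:zk=local");
             Statement stmt = conn.createStatement()) {
          // Disable result sets for DDL on this JDBC session only.
          stmt.execute("ALTER SESSION SET `exec.query.return_result_set_for_ddl` = false");

          // With the option off, the DDL below yields no result set;
          // an affected-rows count is returned instead.
          boolean hasResultSet = stmt.execute(
              "CREATE VIEW dfs.tmp.`example_view` AS SELECT 1 FROM (VALUES(1))");
          System.out.println("returned result set: " + hasResultSet);  // expected: false
          System.out.println("affected rows: " + stmt.getUpdateCount());
        }
      }
    }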


[drill] 03/04: DRILL-6955: storage-jdbc tests improvements

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sorabh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit eb0299982c90757fec869875b879dcdbd68a1b76
Author: Volodymyr Vysotskyi <vv...@gmail.com>
AuthorDate: Mon Jan 7 16:33:52 2019 +0200

    DRILL-6955: storage-jdbc tests improvements
    
    - Remove Maven plugin usage for instantiating test databases and tables
    - Replace Derby with the H2 database
    
    closes #1603
---
 contrib/storage-jdbc/pom.xml                       | 241 +--------------------
 ...ithDerbyIT.java => TestJdbcPluginWithH2IT.java} | 144 +++++++++---
 .../exec/store/jdbc/TestJdbcPluginWithMySQLIT.java | 117 ++++++++--
 .../test/resources/bootstrap-storage-plugins.json  |  25 ---
 .../{derby-test-data.sql => h2-test-data.sql}      |   4 +-
 .../src/test/resources/mysql-test-data-linux.sql   |   2 -
 .../src/test/resources/mysql-test-data.sql         |   3 -
 7 files changed, 224 insertions(+), 312 deletions(-)

diff --git a/contrib/storage-jdbc/pom.xml b/contrib/storage-jdbc/pom.xml
index efaf5c1..6268874 100755
--- a/contrib/storage-jdbc/pom.xml
+++ b/contrib/storage-jdbc/pom.xml
@@ -32,9 +32,6 @@
 
   <properties>
     <mysql.connector.version>8.0.13</mysql.connector.version>
-    <derby.database.name>drill_derby_test</derby.database.name>
-    <mysql.database.name>drill_mysql_test</mysql.database.name>
-    <mysql.scriptFile.name>mysql-test-data.sql</mysql.scriptFile.name>
   </properties>
 
   <dependencies>
@@ -60,252 +57,36 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.derby</groupId>
-      <artifactId>derbyclient</artifactId>
-      <version>10.14.2.0</version>
+      <groupId>mysql</groupId>
+      <artifactId>mysql-connector-java</artifactId>
+      <version>${mysql.connector.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.derby</groupId>
-      <artifactId>derbynet</artifactId>
-      <version>10.14.2.0</version>
+      <groupId>com.wix</groupId>
+      <artifactId>wix-embedded-mysql</artifactId>
+      <version>4.2.0</version>
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>mysql</groupId>
-      <artifactId>mysql-connector-java</artifactId>
-      <version>${mysql.connector.version}</version>
+      <groupId>com.h2database</groupId>
+      <artifactId>h2</artifactId>
+      <version>1.4.197</version>
       <scope>test</scope>
     </dependency>
   </dependencies>
 
-  <profiles>
-    <profile>
-      <id>linux</id>
-      <properties>
-        <!-- Reg expr includes both mysql-test-data.sql and mysql-test-data-linux.sql script files -->
-        <mysql.scriptFile.name>mysql-test-data*.sql</mysql.scriptFile.name>
-      </properties>
-      <activation>
-        <os>
-          <family>linux</family>
-        </os>
-      </activation>
-    </profile>
-  </profiles>
-
   <build>
-    <testResources>
-      <testResource>
-        <directory>src/test/resources</directory>
-        <filtering>true</filtering>
-      </testResource>
-    </testResources>
     <plugins>
       <plugin>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
           <!--
-          Forking multiple processes can cause race conditions with the initialization of
-          the test databases.
+            Forking multiple processes can cause race conditions with the initialization of
+            the test databases.
           -->
           <forkCount combine.self="override">1</forkCount>
-          <excludes>
-            <exclude>**/*</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <!-- Because the JDBC tests are somewhat heavyweight, we only run them in the 'verify' phase -->
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-failsafe-plugin</artifactId>
-        <version>2.22.1</version>
-        <configuration>
-          <forkCount combine.self="override">1</forkCount>
-          <systemPropertyVariables>
-            <derby.port>${derby.reserved.port}</derby.port>
-            <mysql.port>${mysql.reserved.port}</mysql.port>
-            <mysql.name>${mysql.database.name}</mysql.name>
-          </systemPropertyVariables>
-          <skipITs>${skipTests}</skipITs>
-          <includes>
-            <include>**/*IT.java</include>
-          </includes>
-          <useSystemClassLoader>false</useSystemClassLoader>
-        </configuration>
-        <executions>
-          <execution>
-            <id>run-IT-Tests</id>
-            <phase>integration-test</phase>
-            <goals>
-              <goal>integration-test</goal>
-            </goals>
-          </execution>
-          <execution>
-            <phase>verify</phase>
-            <goals>
-              <goal>verify</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <!-- Allows us to reserve ports for external servers that we will launch  -->
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>reserve-network-port</id>
-            <goals>
-              <goal>reserve-network-port</goal>
-            </goals>
-            <phase>process-resources</phase>
-            <configuration>
-              <portNames>
-                <portName>derby.reserved.port</portName>
-                <portName>mysql.reserved.port</portName>
-              </portNames>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <goals>
-              <goal>unpack</goal>
-            </goals>
-            <configuration>
-              <skip>${skipTests}</skip>
-              <artifactItems>
-                <artifactItem>
-                  <!-- This will download a MySQL distribution and use it to run tests against -->
-                  <groupId>com.jcabi</groupId>
-                  <artifactId>mysql-dist</artifactId>
-                  <version>5.6.14</version>
-                  <classifier>${mysql.classifier}</classifier>
-                  <type>zip</type>
-                  <overWrite>false</overWrite>
-                  <outputDirectory>${project.build.directory}/mysql-dist</outputDirectory>
-                </artifactItem>
-              </artifactItems>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <!-- Allows us to run tests against an in-memory Derby database -->
-        <groupId>com.btmatthews.maven.plugins.inmemdb</groupId>
-        <artifactId>inmemdb-maven-plugin</artifactId>
-        <version>1.4.3</version>
-        <configuration>
-          <monitorKey>inmemdb</monitorKey>
-          <monitorPort>11527</monitorPort>
-          <skip>${skipTests}</skip>
-        </configuration>
-        <executions>
-          <execution>
-            <id>run</id>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <phase>pre-integration-test</phase>
-            <configuration>
-              <daemon>true</daemon>
-              <type>derby</type>
-              <database>${derby.database.name}</database>
-              <username>root</username>
-              <password>root</password>
-              <port>${derby.reserved.port}</port>
-              <sources>
-                <script>
-                  <sourceFile>${basedir}/src/test/resources/derby-test-data.sql</sourceFile>
-                </script>
-              </sources>
-            </configuration>
-          </execution>
-          <execution>
-            <id>stop</id>
-            <goals>
-              <goal>stop</goal>
-            </goals>
-            <phase>post-integration-test</phase>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>com.jcabi</groupId>
-        <artifactId>jcabi-mysql-maven-plugin</artifactId>
-        <version>0.9</version>
-        <dependencies>
-          <dependency>
-            <groupId>org.hibernate</groupId>
-            <artifactId>hibernate-validator</artifactId>
-            <version>5.0.0.Final</version>
-            <scope>runtime</scope>
-          </dependency>
-          <dependency>
-            <groupId>javax.xml.bind</groupId>
-            <artifactId>jaxb-api</artifactId>
-            <version>2.3.1</version>
-            <scope>runtime</scope>
-          </dependency>
-        </dependencies>
-        <executions>
-          <execution>
-            <id>mysql-test</id>
-            <goals>
-              <goal>classify</goal>
-              <goal>start</goal>
-              <goal>stop</goal>
-            </goals>
-            <configuration>
-              <skip>${skipTests}</skip>
-              <port>${mysql.reserved.port}</port>
-              <data>${project.build.directory}/mysql-data</data>
-              <clearexistingdata>true</clearexistingdata>
-              <dbname>${mysql.database.name}</dbname>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>sql-maven-plugin</artifactId>
-        <version>1.5</version>
-        <dependencies>
-          <dependency>
-            <groupId>mysql</groupId>
-            <artifactId>mysql-connector-java</artifactId>
-            <version>${mysql.connector.version}</version>
-          </dependency>
-        </dependencies>
-        <configuration>
-          <skip>${skipTests}</skip>
-          <driver>com.mysql.cj.jdbc.Driver</driver>
-          <username>root</username>
-          <password>root</password>
-          <url>jdbc:mysql://localhost:${mysql.reserved.port}/${mysql.database.name}</url>
         </configuration>
-        <executions>
-          <execution>
-            <id>create-tables</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>execute</goal>
-            </goals>
-            <configuration>
-              <orderFile>ascending</orderFile>
-              <fileset>
-                <basedir>${basedir}/src/test/resources</basedir>
-                <includes>
-                  <include>${mysql.scriptFile.name}</include>
-                </includes>
-              </fileset>
-            </configuration>
-          </execution>
-        </executions>
       </plugin>
     </plugins>
   </build>
diff --git a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithDerbyIT.java b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithH2IT.java
similarity index 51%
rename from contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithDerbyIT.java
rename to contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithH2IT.java
index 168b5f3..a40eec2 100644
--- a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithDerbyIT.java
+++ b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithH2IT.java
@@ -18,48 +18,96 @@
 package org.apache.drill.exec.store.jdbc;
 
 import org.apache.drill.categories.JdbcStorageTest;
-import org.apache.drill.PlanTestBase;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.expr.fn.impl.DateUtility;
 
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.StoragePluginRegistryImpl;
 import org.apache.drill.exec.util.StoragePluginTestUtils;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterTest;
+import org.h2.tools.RunScript;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.io.FileReader;
 import java.math.BigDecimal;
+import java.net.URL;
 import java.nio.file.Paths;
+import java.sql.Connection;
+import java.sql.DriverManager;
 
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.core.IsNot.not;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
 
 /**
- * JDBC storage plugin tests against Derby.
+ * JDBC storage plugin tests against H2.
  */
 @Category(JdbcStorageTest.class)
-public class TestJdbcPluginWithDerbyIT extends PlanTestBase {
+public class TestJdbcPluginWithH2IT extends ClusterTest {
 
   private static final String TABLE_PATH = "jdbcmulti/";
   private static final String TABLE_NAME = String.format("%s.`%s`", StoragePluginTestUtils.DFS_PLUGIN_NAME, TABLE_PATH);
 
   @BeforeClass
+  public static void initH2() throws Exception {
+    Class.forName("org.h2.Driver");
+    String connString = String.format(
+        "jdbc:h2:%s", dirTestWatcher.getTmpDir().getCanonicalPath());
+
+    try (Connection connection = DriverManager.getConnection(connString, "root", "root")) {
+      URL scriptFile = TestJdbcPluginWithH2IT.class.getClassLoader().getResource("h2-test-data.sql");
+      Assert.assertNotNull("Script for test tables generation 'h2-test-data.sql' " +
+          "cannot be found in test resources", scriptFile);
+      RunScript.execute(connection, new FileReader(scriptFile.getFile()));
+    }
+
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+
+    JdbcStorageConfig jdbcStorageConfig = new JdbcStorageConfig(
+        "org.h2.Driver",
+        connString,
+        "root",
+        "root",
+        true);
+    jdbcStorageConfig.setEnabled(true);
+
+    String pluginName = "h2";
+    DrillbitContext context = cluster.drillbit().getContext();
+    JdbcStoragePlugin jdbcStoragePlugin = new JdbcStoragePlugin(jdbcStorageConfig,
+        context, pluginName);
+    StoragePluginRegistryImpl pluginRegistry = (StoragePluginRegistryImpl) context.getStorage();
+    pluginRegistry.addPluginToPersistentStoreIfAbsent(pluginName, jdbcStorageConfig, jdbcStoragePlugin);
+  }
+
+  @BeforeClass
   public static void copyData() {
     dirTestWatcher.copyResourceToRoot(Paths.get(TABLE_PATH));
   }
 
   @Test
   public void testCrossSourceMultiFragmentJoin() throws Exception {
-    testNoResult("SET `planner.slice_target` = 1");
-    test("select x.person_id, y.salary from derby.drill_derby_test.person x "
-        + "join %s y on x.person_id = y.person_id ", TABLE_NAME);
+    try {
+      client.alterSession(ExecConstants.SLICE_TARGET, 1);
+      run("select x.person_id, y.salary from h2.drill_h2_test.person x "
+              + "join %s y on x.person_id = y.person_id ", TABLE_NAME);
+    } finally {
+      client.resetSession(ExecConstants.SLICE_TARGET);
+    }
   }
 
   @Test
   public void validateResult() throws Exception {
-    // Skip date, time, and timestamp types since derby mangles these due to improper timezone support.
+    // Skip date, time, and timestamp types since h2 mangles these due to improper timezone support.
     testBuilder()
         .sqlQuery(
             "select person_id, first_name, last_name, address, city, state, zip, json, bigint_field, smallint_field, " +
                 "numeric_field, boolean_field, double_field, float_field, real_field, time_field, timestamp_field, " +
-                "date_field, clob_field from derby.`drill_derby_test`.person")
+                "date_field, clob_field from h2.`drill_h2_test`.person")
         .ordered()
         .baselineColumns("person_id", "first_name", "last_name", "address", "city", "state", "zip", "json",
             "bigint_field", "smallint_field", "numeric_field", "boolean_field", "double_field", "float_field",
@@ -85,84 +133,108 @@ public class TestJdbcPluginWithDerbyIT extends PlanTestBase {
 
   @Test
   public void pushdownJoin() throws Exception {
-    testNoResult("use derby");
-    String query = "select x.person_id from (select person_id from derby.drill_derby_test.person) x "
-            + "join (select person_id from derby.drill_derby_test.person) y on x.person_id = y.person_id ";
-    testPlanMatchingPatterns(query, new String[]{}, new String[]{"Join"});
+    run("use h2");
+    String query = "select x.person_id from (select person_id from h2.drill_h2_test.person) x "
+            + "join (select person_id from h2.drill_h2_test.person) y on x.person_id = y.person_id ";
+
+    String plan = queryBuilder().sql(query).explainText();
+
+    assertThat("Query plan shouldn't contain Join operator",
+        plan, not(containsString("Join")));
   }
 
   @Test
   public void pushdownJoinAndFilterPushDown() throws Exception {
-    final String query = "select * from \n" +
-        "derby.drill_derby_test.person e\n" +
+    String query = "select * from \n" +
+        "h2.drill_h2_test.person e\n" +
         "INNER JOIN \n" +
-        "derby.drill_derby_test.person s\n" +
+        "h2.drill_h2_test.person s\n" +
         "ON e.FIRST_NAME = s.FIRST_NAME\n" +
         "WHERE e.LAST_NAME > 'hello'";
 
-    testPlanMatchingPatterns(query, new String[] {}, new String[] { "Join", "Filter" });
+    String plan = queryBuilder().sql(query).explainText();
+
+    assertThat("Query plan shouldn't contain Join operator",
+        plan, not(containsString("Join")));
+    assertThat("Query plan shouldn't contain Filter operator",
+        plan, not(containsString("Filter")));
   }
 
   @Test
   public void pushdownAggregation() throws Exception {
-    final String query = "select count(*) from derby.drill_derby_test.person";
-    testPlanMatchingPatterns(query, new String[] {}, new String[] { "Aggregate" });
+    String query = "select count(*) from h2.drill_h2_test.person";
+    String plan = queryBuilder().sql(query).explainText();
+
+    assertThat("Query plan shouldn't contain Aggregate operator",
+        plan, not(containsString("Aggregate")));
   }
 
   @Test
   public void pushdownDoubleJoinAndFilter() throws Exception {
-    final String query = "select * from \n" +
-        "derby.drill_derby_test.person e\n" +
+    String query = "select * from \n" +
+        "h2.drill_h2_test.person e\n" +
         "INNER JOIN \n" +
-        "derby.drill_derby_test.person s\n" +
+        "h2.drill_h2_test.person s\n" +
         "ON e.person_ID = s.person_ID\n" +
         "INNER JOIN \n" +
-        "derby.drill_derby_test.person ed\n" +
+        "h2.drill_h2_test.person ed\n" +
         "ON e.person_ID = ed.person_ID\n" +
         "WHERE s.first_name > 'abc' and ed.first_name > 'efg'";
-    testPlanMatchingPatterns(query, new String[] {}, new String[] { "Join", "Filter" });
+
+    String plan = queryBuilder().sql(query).explainText();
+
+    assertThat("Query plan shouldn't contain Join operator",
+        plan, not(containsString("Join")));
+    assertThat("Query plan shouldn't contain Filter operator",
+        plan, not(containsString("Filter")));
   }
 
   @Test
   public void showTablesDefaultSchema() throws Exception {
-    testNoResult("use derby.drill_derby_test");
-    assertEquals(1, testSql("show tables like 'PERSON'"));
+    run("use h2.drill_h2_test");
+    assertEquals(1, queryBuilder().sql("show tables like 'PERSON'").run().recordCount());
 
     // check table names case insensitivity
-    assertEquals(1, testSql("show tables like 'person'"));
+    assertEquals(1, queryBuilder().sql("show tables like 'person'").run().recordCount());
   }
 
   @Test
   public void describe() throws Exception {
-    testNoResult("use derby.drill_derby_test");
-    assertEquals(19, testSql("describe PERSON"));
+    run("use h2.drill_h2_test");
+    assertEquals(19, queryBuilder().sql("describe PERSON").run().recordCount());
 
     // check table names case insensitivity
-    assertEquals(19, testSql("describe person"));
+    assertEquals(19, queryBuilder().sql("describe person").run().recordCount());
   }
 
   @Test
   public void ensureDrillFunctionsAreNotPushedDown() throws Exception {
     // This should verify that we're not trying to push CONVERT_FROM into the JDBC storage plugin. If were pushing
     // this function down, the SQL query would fail.
-    testNoResult("select CONVERT_FROM(JSON, 'JSON') from derby.drill_derby_test.person where person_ID = 4");
+    run("select CONVERT_FROM(JSON, 'JSON') from h2.drill_h2_test.person where person_ID = 4");
   }
 
   @Test
   public void pushdownFilter() throws Exception {
-    String query = "select * from derby.drill_derby_test.person where person_ID = 1";
-    testPlanMatchingPatterns(query, new String[]{}, new String[]{"Filter"});
+    String query = "select * from h2.drill_h2_test.person where person_ID = 1";
+    String plan = queryBuilder().sql(query).explainText();
+
+    assertThat("Query plan shouldn't contain Filter operator",
+        plan, not(containsString("Filter")));
   }
 
   @Test
   public void testCaseInsensitiveTableNames() throws Exception {
-    assertEquals(5, testSql("select * from derby.drill_derby_test.PeRsOn"));
-    assertEquals(5, testSql("select * from derby.drill_derby_test.PERSON"));
-    assertEquals(5, testSql("select * from derby.drill_derby_test.person"));
+    assertEquals(5, queryBuilder().sql("select * from h2.drill_h2_test.PeRsOn").run().recordCount());
+    assertEquals(5, queryBuilder().sql("select * from h2.drill_h2_test.PERSON").run().recordCount());
+    assertEquals(5, queryBuilder().sql("select * from h2.drill_h2_test.person").run().recordCount());
   }
 
   @Test
   public void testJdbcStoragePluginSerDe() throws Exception {
-    testPhysicalPlanExecutionBasedOnQuery("select * from derby.drill_derby_test.PeRsOn");
+    String query = "select * from h2.drill_h2_test.PeRsOn";
+
+    String plan = queryBuilder().sql(query).explainJson();
+    assertEquals(5, queryBuilder().physical(plan).run().recordCount());
   }
 }
diff --git a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
index 963d75e..447e76a 100644
--- a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
+++ b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
@@ -17,24 +17,103 @@
  */
 package org.apache.drill.exec.store.jdbc;
 
+import com.wix.mysql.EmbeddedMysql;
+import com.wix.mysql.ScriptResolver;
+import com.wix.mysql.config.MysqldConfig;
+import com.wix.mysql.config.SchemaConfig;
+import com.wix.mysql.distribution.Version;
 import org.apache.drill.categories.JdbcStorageTest;
 import org.apache.drill.exec.expr.fn.impl.DateUtility;
-import org.apache.drill.PlanTestBase;
-
+import org.apache.drill.exec.store.StoragePluginRegistryImpl;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterTest;
+import org.apache.drill.test.QueryTestUtil;
+import org.joda.time.DateTimeZone;
+import org.junit.AfterClass;
 import org.junit.Assume;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.math.BigDecimal;
 
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.core.IsNot.not;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
 
 /**
  * JDBC storage plugin tests against MySQL.
  * Note: it requires libaio.so library in the system
  */
 @Category(JdbcStorageTest.class)
-public class TestJdbcPluginWithMySQLIT extends PlanTestBase {
+public class TestJdbcPluginWithMySQLIT extends ClusterTest {
+
+  private static EmbeddedMysql mysqld;
+
+  @BeforeClass
+  public static void initMysql() throws Exception {
+    String mysqlPluginName = "mysql";
+    String mysqlDBName = "drill_mysql_test";
+    int mysqlPort = QueryTestUtil.getFreePortNumber(2215, 300);
+
+    MysqldConfig config = MysqldConfig.aMysqldConfig(Version.v5_6_21)
+        .withPort(mysqlPort)
+        .withUser("mysqlUser", "mysqlPass")
+        .withTimeZone(DateTimeZone.UTC.toTimeZone())
+        .build();
+
+    SchemaConfig.Builder schemaConfig = SchemaConfig.aSchemaConfig(mysqlDBName)
+        .withScripts(ScriptResolver.classPathScript("mysql-test-data.sql"));
+
+    String osName = System.getProperty("os.name").toLowerCase();
+    if (osName.startsWith("linux")) {
+      schemaConfig.withScripts(ScriptResolver.classPathScript("mysql-test-data-linux.sql"));
+    }
+
+    mysqld = EmbeddedMysql.anEmbeddedMysql(config)
+        .addSchema(schemaConfig.build())
+        .start();
+
+    startCluster(ClusterFixture.builder(dirTestWatcher));
+
+    StoragePluginRegistryImpl pluginRegistry = (StoragePluginRegistryImpl) cluster.drillbit().getContext().getStorage();
+
+    JdbcStorageConfig jdbcStorageConfig = new JdbcStorageConfig(
+        "com.mysql.cj.jdbc.Driver",
+        String.format("jdbc:mysql://localhost:%s/%s?useJDBCCompliantTimezoneShift=true", mysqlPort, mysqlDBName),
+        "mysqlUser",
+        "mysqlPass",
+        false);
+    jdbcStorageConfig.setEnabled(true);
+
+    JdbcStoragePlugin jdbcStoragePlugin = new JdbcStoragePlugin(jdbcStorageConfig,
+        cluster.drillbit().getContext(), mysqlPluginName);
+    pluginRegistry.addPluginToPersistentStoreIfAbsent(mysqlPluginName, jdbcStorageConfig, jdbcStoragePlugin);
+
+    if (osName.startsWith("linux")) {
+      // adds storage plugin with case insensitive table names
+      String mysqlCaseSensitivePluginName = "mysqlCaseInsensitive";
+      JdbcStorageConfig jdbcCaseSensitiveStorageConfig = new JdbcStorageConfig(
+          "com.mysql.cj.jdbc.Driver",
+          String.format("jdbc:mysql://localhost:%s/%s?useJDBCCompliantTimezoneShift=true", mysqlPort, mysqlDBName),
+          "mysqlUser",
+          "mysqlPass",
+          true);
+      jdbcCaseSensitiveStorageConfig.setEnabled(true);
+
+      JdbcStoragePlugin jdbcCaseSensitiveStoragePlugin = new JdbcStoragePlugin(jdbcCaseSensitiveStorageConfig,
+          cluster.drillbit().getContext(), mysqlCaseSensitivePluginName);
+      pluginRegistry.addPluginToPersistentStoreIfAbsent(mysqlCaseSensitivePluginName, jdbcCaseSensitiveStorageConfig, jdbcCaseSensitiveStoragePlugin);
+    }
+  }
+
+  @AfterClass
+  public static void stopMysql() {
+    if (mysqld != null) {
+      mysqld.stop();
+    }
+  }
 
   @Test
   public void validateResult() throws Exception {
@@ -110,14 +189,17 @@ public class TestJdbcPluginWithMySQLIT extends PlanTestBase {
             null,
             null,
             null, "XXX")
-            .go();
+         .go();
   }
 
   @Test
   public void pushdownJoin() throws Exception {
     String query = "select x.person_id from (select person_id from mysql.`drill_mysql_test`.person) x "
             + "join (select person_id from mysql.`drill_mysql_test`.person) y on x.person_id = y.person_id ";
-    testPlanMatchingPatterns(query, new String[]{}, new String[]{"Join"});
+    String plan = queryBuilder().sql(query).explainText();
+
+    assertThat("Query plan shouldn't contain Join operator",
+        plan, not(containsString("Join")));
   }
 
   @Test
@@ -129,19 +211,26 @@ public class TestJdbcPluginWithMySQLIT extends PlanTestBase {
             "ON e.first_name = s.first_name " +
             "WHERE e.last_name > 'hello'";
 
-    testPlanMatchingPatterns(query, new String[] {}, new String[] { "Join", "Filter" });
+    String plan = queryBuilder().sql(query).explainText();
+
+    assertThat("Query plan shouldn't contain Join operator",
+        plan, not(containsString("Join")));
+    assertThat("Query plan shouldn't contain Filter operator",
+        plan, not(containsString("Filter")));
   }
 
   @Test
   public void testPhysicalPlanSubmission() throws Exception {
-    testPhysicalPlanExecutionBasedOnQuery("select * from mysql.`drill_mysql_test`.person");
+    String query = "select * from mysql.`drill_mysql_test`.person";
+    String plan = queryBuilder().sql(query).explainJson();
+    assertEquals(4, queryBuilder().physical(plan).run().recordCount());
   }
 
   @Test
   public void emptyOutput() throws Exception {
     String query = "select * from mysql.`drill_mysql_test`.person e limit 0";
 
-    test(query);
+    run(query);
   }
 
   @Test
@@ -151,16 +240,16 @@ public class TestJdbcPluginWithMySQLIT extends PlanTestBase {
         "Skip tests for non-linux systems due to " +
             "table names case-insensitivity problems on Windows and MacOS",
         osName.startsWith("linux"));
-    test("use mysqlCaseInsensitive.`drill_mysql_test`");
+    run("use mysqlCaseInsensitive.`drill_mysql_test`");
     // two table names match the filter ignoring the case
-    assertEquals(2, testSql("show tables like 'caseSensitiveTable'"));
+    assertEquals(2, queryBuilder().sql("show tables like 'caseSensitiveTable'").run().recordCount());
 
-    test("use mysql.`drill_mysql_test`");
+    run("use mysql.`drill_mysql_test`");
     // single table matches the filter considering table name the case
-    assertEquals(1, testSql("show tables like 'caseSensitiveTable'"));
+    assertEquals(1, queryBuilder().sql("show tables like 'caseSensitiveTable'").run().recordCount());
 
     // checks that tables with names in different case are recognized correctly
-    assertEquals(1, testSql("describe caseSensitiveTable"));
-    assertEquals(2, testSql("describe CASESENSITIVETABLE"));
+    assertEquals(1, queryBuilder().sql("describe caseSensitiveTable").run().recordCount());
+    assertEquals(2, queryBuilder().sql("describe CASESENSITIVETABLE").run().recordCount());
   }
 }
diff --git a/contrib/storage-jdbc/src/test/resources/bootstrap-storage-plugins.json b/contrib/storage-jdbc/src/test/resources/bootstrap-storage-plugins.json
deleted file mode 100755
index 1c2aa3f..0000000
--- a/contrib/storage-jdbc/src/test/resources/bootstrap-storage-plugins.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
-    "storage" : {
-        derby : {
-          type    : "jdbc",
-          driver  : "org.apache.derby.jdbc.ClientDriver",
-          url     : "jdbc:derby://localhost:${derby.reserved.port}/memory:${derby.database.name};user=root;password=root",
-          caseInsensitiveTableNames: true,
-          enabled : true
-        },
-        mysql : {
-          type    : "jdbc",
-          driver  : "com.mysql.cj.jdbc.Driver",
-          url     : "jdbc:mysql://localhost:${mysql.reserved.port}/${mysql.database.name}?user=root&password=root&useJDBCCompliantTimezoneShift=true",
-          enabled : true
-        },
-        mysqlCaseInsensitive : {
-          type    : "jdbc",
-          driver  : "com.mysql.cj.jdbc.Driver",
-          url     : "jdbc:mysql://localhost:${mysql.reserved.port}/${mysql.database.name}?user=root&password=root&useJDBCCompliantTimezoneShift=true",
-          caseInsensitiveTableNames: true,
-          enabled : true
-        }
-    }
-}
-
diff --git a/contrib/storage-jdbc/src/test/resources/derby-test-data.sql b/contrib/storage-jdbc/src/test/resources/h2-test-data.sql
similarity index 96%
rename from contrib/storage-jdbc/src/test/resources/derby-test-data.sql
rename to contrib/storage-jdbc/src/test/resources/h2-test-data.sql
index 0d69724..a2643fb 100644
--- a/contrib/storage-jdbc/src/test/resources/derby-test-data.sql
+++ b/contrib/storage-jdbc/src/test/resources/h2-test-data.sql
@@ -1,6 +1,6 @@
 
-create SCHEMA drill_derby_test;
-set schema drill_derby_test;
+create SCHEMA drill_h2_test;
+set schema drill_h2_test;
 
 create table person (
   person_id       INT NOT NULL PRIMARY KEY,
diff --git a/contrib/storage-jdbc/src/test/resources/mysql-test-data-linux.sql b/contrib/storage-jdbc/src/test/resources/mysql-test-data-linux.sql
index e209166..2d7f3ca 100644
--- a/contrib/storage-jdbc/src/test/resources/mysql-test-data-linux.sql
+++ b/contrib/storage-jdbc/src/test/resources/mysql-test-data-linux.sql
@@ -1,5 +1,3 @@
-set global time_zone = "+00:00";
-
 use drill_mysql_test;
 
 create table CASESENSITIVETABLE (
diff --git a/contrib/storage-jdbc/src/test/resources/mysql-test-data.sql b/contrib/storage-jdbc/src/test/resources/mysql-test-data.sql
index 0d2ab68..9a180e0 100644
--- a/contrib/storage-jdbc/src/test/resources/mysql-test-data.sql
+++ b/contrib/storage-jdbc/src/test/resources/mysql-test-data.sql
@@ -1,6 +1,3 @@
-
-set global time_zone = "+00:00";
-
 use drill_mysql_test;
 
 create table caseSensitiveTable (