You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@drill.apache.org by am...@apache.org on 2018/05/12 18:04:29 UTC

[drill] branch master updated (c1f0adc -> ca90229)

This is an automated email from the ASF dual-hosted git repository.

amansinha pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from c1f0adc  DRILL-6242 Use java.time.Local{Date|Time|DateTime} for Drill Date, Time, Timestamp types. (#3)
     new 9a47d55  DRILL-6363: Upgrade jmockit and mockito libs
     new ca90229  DRILL-6272: Refactor dynamic UDFs and function initializer tests to generate needed binary and source jars at runtime

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../test/java/org/apache/drill/test/TestTools.java |   2 +-
 exec/java-exec/pom.xml                             |  48 ++
 .../drill/TestTpchDistributedConcurrent.java       |   3 +-
 .../drill/exec/coord/zk/TestZookeeperClient.java   |   2 +-
 .../exec/expr/fn/FunctionInitializerTest.java      |  54 +--
 .../rpc/user/TemporaryTablesAutomaticDropTest.java |  42 +-
 .../java/org/apache/drill/exec/sql/TestCTTAS.java  | 315 ++++---------
 .../apache/drill/exec/udf/dynamic/JarBuilder.java  |  96 ++++
 .../udf/dynamic}/TestDynamicUDFSupport.java        | 523 ++++++++++-----------
 .../exec/vector/complex/TestEmptyPopulation.java   |   3 -
 .../java-exec/src/test/resources/drill-udf/pom.xml |  92 ++++
 .../drill/udf/dynamic/CustomAbsFunction.java       |  39 +-
 .../drill/udf/dynamic/CustomLogFunction.java       |  36 +-
 .../udf/dynamic/CustomLowerDummyFunction.java}     |  44 +-
 .../drill/udf/dynamic/CustomLowerFunction.java     |  42 +-
 .../drill/udf/dynamic/CustomLowerFunctionV2.java   |  42 +-
 .../drill/udf/dynamic/CustomUpperFunction.java     |  42 +-
 .../apache/drill/udf/dynamic/LowerFunction.java    |  42 +-
 .../drill-udf/src/main/resources/drill-module.conf |   1 +
 .../test/resources/jars/DrillUDF-1.0-sources.jar   | Bin 1892 -> 0 bytes
 .../src/test/resources/jars/DrillUDF-1.0.jar       | Bin 3146 -> 0 bytes
 .../test/resources/jars/DrillUDF-2.0-sources.jar   | Bin 1891 -> 0 bytes
 .../src/test/resources/jars/DrillUDF-2.0.jar       | Bin 3142 -> 0 bytes
 .../jars/DrillUDF-overloading-1.0-sources.jar      | Bin 3473 -> 0 bytes
 .../resources/jars/DrillUDF-overloading-1.0.jar    | Bin 5779 -> 0 bytes
 .../resources/jars/DrillUDF_Copy-1.0-sources.jar   | Bin 1892 -> 0 bytes
 .../src/test/resources/jars/DrillUDF_Copy-1.0.jar  | Bin 3185 -> 0 bytes
 .../jars/DrillUDF_DupFunc-1.0-sources.jar          | Bin 1888 -> 0 bytes
 .../test/resources/jars/DrillUDF_DupFunc-1.0.jar   | Bin 3201 -> 0 bytes
 .../resources/jars/DrillUDF_Empty-1.0-sources.jar  | Bin 536 -> 0 bytes
 .../src/test/resources/jars/DrillUDF_Empty-1.0.jar | Bin 1863 -> 0 bytes
 .../jars/DrillUDF_NoMarkerFile-1.0-sources.jar     | Bin 1715 -> 0 bytes
 .../resources/jars/DrillUDF_NoMarkerFile-1.0.jar   | Bin 3084 -> 0 bytes
 .../resources/jars/v2/DrillUDF-1.0-sources.jar     | Bin 1899 -> 0 bytes
 .../src/test/resources/jars/v2/DrillUDF-1.0.jar    | Bin 3215 -> 0 bytes
 pom.xml                                            |   7 +-
 36 files changed, 812 insertions(+), 663 deletions(-)
 create mode 100644 exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/JarBuilder.java
 rename exec/java-exec/src/test/java/org/apache/drill/{ => exec/udf/dynamic}/TestDynamicUDFSupport.java (65%)
 create mode 100644 exec/java-exec/src/test/resources/drill-udf/pom.xml
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java => exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomAbsFunction.java (61%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java => exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLogFunction.java (63%)
 copy exec/java-exec/src/{main/java/org/apache/drill/exec/expr/fn/impl/conv/UTF8ConvertTo.java => test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerDummyFunction.java} (59%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java => exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunction.java (61%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java => exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunctionV2.java (61%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java => exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomUpperFunction.java (61%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java => exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/LowerFunction.java (61%)
 create mode 100644 exec/java-exec/src/test/resources/drill-udf/src/main/resources/drill-module.conf
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF-1.0-sources.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF-1.0.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF-2.0-sources.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF-2.0.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0-sources.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0-sources.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0-sources.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0-sources.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0-sources.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0-sources.jar
 delete mode 100644 exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0.jar

-- 
To stop receiving notification emails like this one, please contact
amansinha@apache.org.

[drill] 01/02: DRILL-6363: Upgrade jmockit and mockito libs

Posted by am...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

amansinha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 9a47d555b00052115016fbe35cad5a4147d42ad5
Author: Arina Ielchiieva <ar...@gmail.com>
AuthorDate: Sat May 5 15:31:10 2018 +0300

    DRILL-6363: Upgrade jmockit and mockito libs
---
 .../test/java/org/apache/drill/test/TestTools.java |   2 +-
 .../drill/TestTpchDistributedConcurrent.java       |   3 +-
 .../drill/exec/coord/zk/TestZookeeperClient.java   |   2 +-
 .../rpc/user/TemporaryTablesAutomaticDropTest.java |  42 ++-
 .../java/org/apache/drill/exec/sql/TestCTTAS.java  | 315 +++++++--------------
 .../exec/vector/complex/TestEmptyPopulation.java   |   3 -
 pom.xml                                            |   7 +-
 7 files changed, 134 insertions(+), 240 deletions(-)

diff --git a/common/src/test/java/org/apache/drill/test/TestTools.java b/common/src/test/java/org/apache/drill/test/TestTools.java
index 8cf7ca7..2735c54 100644
--- a/common/src/test/java/org/apache/drill/test/TestTools.java
+++ b/common/src/test/java/org/apache/drill/test/TestTools.java
@@ -47,7 +47,7 @@ public class TestTools {
     .indexOf("-agentlib:jdwp") > 0;
 
   public static TestRule getTimeoutRule(int timeout) {
-    return IS_DEBUG ? new TestName() : new Timeout(timeout);
+    return IS_DEBUG ? new TestName() : Timeout.millis(timeout);
   }
 
   /**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java
index 8991b8b..74b9a5c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java
@@ -33,6 +33,7 @@ import org.apache.drill.exec.rpc.user.UserResultsListener;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.test.QueryTestUtil;
 import org.junit.Rule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestRule;
 
@@ -176,7 +177,7 @@ public class TestTpchDistributedConcurrent extends BaseTestQuery {
     }
   }
 
-  //@Test
+  @Test
   public void testConcurrentQueries() throws Exception {
     QueryTestUtil.testRunAndPrint(client, UserBitShared.QueryType.SQL, alterSession);
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/coord/zk/TestZookeeperClient.java b/exec/java-exec/src/test/java/org/apache/drill/exec/coord/zk/TestZookeeperClient.java
index 87cf72d..e0e6c79 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/coord/zk/TestZookeeperClient.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/coord/zk/TestZookeeperClient.java
@@ -125,7 +125,7 @@ public class TestZookeeperClient {
 
     Mockito
         .when(client.getCache().getCurrentData(absPath))
-        .thenThrow(Exception.class);
+        .thenThrow(RuntimeException.class);
 
     client.hasPath(path);
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java
index 5553519..5aef6f7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/rpc/user/TemporaryTablesAutomaticDropTest.java
@@ -19,39 +19,40 @@ package org.apache.drill.exec.rpc.user;
 
 import mockit.Mock;
 import mockit.MockUp;
+import org.apache.drill.exec.store.StorageStrategy;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.util.StoragePluginTestUtils;
 import org.apache.drill.test.DirTestWatcher;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
 import org.junit.Before;
 import org.junit.Test;
 
 import java.io.File;
-import java.util.Properties;
 import java.util.UUID;
 
 import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 public class TemporaryTablesAutomaticDropTest extends BaseTestQuery {
 
-  private static final String session_id = "sessionId";
+  private static final UUID SESSION_UUID = UUID.randomUUID();
 
   @Before
-  public void setup() throws Exception {
+  public void setup() {
     new MockUp<UUID>() {
       @Mock
       public UUID randomUUID() {
-        return UUID.nameUUIDFromBytes(session_id.getBytes());
+        return SESSION_UUID;
       }
     };
-
-    Properties testConfigurations = cloneDefaultTestConfigProperties();
-    testConfigurations.put(ExecConstants.DEFAULT_TEMPORARY_WORKSPACE, DFS_TMP_SCHEMA);
-    updateTestCluster(1, DrillConfig.create(testConfigurations));
+    updateTestCluster(1, DrillConfig.create(cloneDefaultTestConfigProperties()));
   }
 
   @Test
@@ -90,14 +91,25 @@ public class TemporaryTablesAutomaticDropTest extends BaseTestQuery {
   }
 
   private File createAndCheckSessionTemporaryLocation(String suffix, File schemaLocation) throws Exception {
-    final String temporaryTableName = "temporary_table_automatic_drop_" + suffix;
-    final File sessionTemporaryLocation = schemaLocation
-      .toPath()
-      .resolve(UUID.nameUUIDFromBytes(session_id.getBytes()).toString())
-      .toFile();
+    String temporaryTableName = "temporary_table_automatic_drop_" + suffix;
+    File sessionTemporaryLocation = schemaLocation.toPath().resolve(SESSION_UUID.toString()).toFile();
 
     test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
-    assertTrue("Session temporary location should exist", sessionTemporaryLocation.exists());
+
+    FileSystem fs = getLocalFileSystem();
+    Path sessionPath = new Path(sessionTemporaryLocation.getAbsolutePath());
+    assertTrue("Session temporary location should exist", fs.exists(sessionPath));
+    assertEquals("Directory permission should match",
+      StorageStrategy.TEMPORARY.getFolderPermission(), fs.getFileStatus(sessionPath).getPermission());
+    Path tempTablePath = new Path(sessionPath, SESSION_UUID.toString());
+    assertTrue("Temporary table location should exist", fs.exists(tempTablePath));
+    assertEquals("Directory permission should match",
+      StorageStrategy.TEMPORARY.getFolderPermission(), fs.getFileStatus(tempTablePath).getPermission());
+    RemoteIterator<LocatedFileStatus> fileIterator = fs.listFiles(tempTablePath, false);
+    while (fileIterator.hasNext()) {
+      LocatedFileStatus file = fileIterator.next();
+      assertEquals("File permission should match", StorageStrategy.TEMPORARY.getFilePermission(), file.getPermission());
+    }
     return sessionTemporaryLocation;
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
index 318e4c9..f2c0c82 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
@@ -18,78 +18,42 @@
 package org.apache.drill.exec.sql;
 
 import com.google.common.collect.Lists;
-import mockit.Mock;
-import mockit.MockUp;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.dfs.FileSystemConfig;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlTest;
-import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.store.StorageStrategy;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.rules.ExpectedException;
 
 import java.io.File;
-import java.io.IOException;
 import java.nio.file.Paths;
 import java.util.List;
-import java.util.Properties;
-import java.util.UUID;
 
 import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_PLUGIN_NAME;
 import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
 import static org.hamcrest.CoreMatchers.containsString;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
 
 @Category(SqlTest.class)
 public class TestCTTAS extends BaseTestQuery {
 
-  private static final UUID session_id = UUID.nameUUIDFromBytes("sessionId".getBytes());
   private static final String temp2_wk = "tmp2";
   private static final String temp2_schema = String.format("%s.%s", DFS_PLUGIN_NAME, temp2_wk);
 
-  private static FileSystem fs;
-  private static FsPermission expectedFolderPermission;
-  private static FsPermission expectedFilePermission;
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
 
   @BeforeClass
   public static void init() throws Exception {
-    MockUp<UUID> uuidMockUp = mockRandomUUID(session_id);
-    Properties testConfigurations = cloneDefaultTestConfigProperties();
-    testConfigurations.put(ExecConstants.DEFAULT_TEMPORARY_WORKSPACE, DFS_TMP_SCHEMA);
-    updateTestCluster(1, DrillConfig.create(testConfigurations));
-    uuidMockUp.tearDown();
-
     File tmp2 = dirTestWatcher.makeSubDir(Paths.get("tmp2"));
-
     StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
     FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.getPlugin(DFS_PLUGIN_NAME).getConfig();
     pluginConfig.workspaces.put(temp2_wk, new WorkspaceConfig(tmp2.getAbsolutePath(), true, null, false));
     pluginRegistry.createOrUpdate(DFS_PLUGIN_NAME, pluginConfig, true);
-
-    fs = getLocalFileSystem();
-    expectedFolderPermission = new FsPermission(StorageStrategy.TEMPORARY.getFolderPermission());
-    expectedFilePermission = new FsPermission(StorageStrategy.TEMPORARY.getFilePermission());
-  }
-
-  private static MockUp<UUID> mockRandomUUID(final UUID uuid) {
-    return new MockUp<UUID>() {
-      @Mock
-      public UUID randomUUID() {
-        return uuid;
-      }
-    };
   }
 
   @Test
@@ -105,10 +69,8 @@ public class TestCTTAS extends BaseTestQuery {
     try {
       for (String storageFormat : storageFormats) {
         String temporaryTableName = "temp_" + storageFormat;
-        mockRandomUUID(UUID.nameUUIDFromBytes(temporaryTableName.getBytes()));
         test("alter session set `store.format`='%s'", storageFormat);
         test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
-        checkPermission(temporaryTableName);
 
         testBuilder()
             .sqlQuery("select * from %s", temporaryTableName)
@@ -164,121 +126,91 @@ public class TestCTTAS extends BaseTestQuery {
   @Test
   public void testPartitionByWithTemporaryTables() throws Exception {
     String temporaryTableName = "temporary_table_with_partitions";
-    mockRandomUUID(UUID.nameUUIDFromBytes(temporaryTableName.getBytes()));
     test("create TEMPORARY table %s partition by (c1) as select * from (" +
         "select 'A' as c1 from (values(1)) union all select 'B' as c1 from (values(1))) t", temporaryTableName);
-    checkPermission(temporaryTableName);
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testCreationOutsideOfDefaultTemporaryWorkspace() throws Exception {
-    try {
-      String temporaryTableName = "temporary_table_outside_of_default_workspace";
-      test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", temp2_schema, temporaryTableName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: Temporary tables are not allowed to be created / dropped " +
-              "outside of default temporary workspace [%s].", DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    String temporaryTableName = "temporary_table_outside_of_default_workspace";
+
+    expectUserRemoteExceptionWithMessage(String.format(
+      "VALIDATION ERROR: Temporary tables are not allowed to be created / dropped " +
+        "outside of default temporary workspace [%s].", DFS_TMP_SCHEMA));
+
+    test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", temp2_schema, temporaryTableName);
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testCreateWhenTemporaryTableExistsWithoutSchema() throws Exception {
     String temporaryTableName = "temporary_table_exists_without_schema";
-    try {
-      test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
-      test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-         "VALIDATION ERROR: A table or view with given name [%s]" +
-             " already exists in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
+
+    expectUserRemoteExceptionWithTableExistsMessage(temporaryTableName, DFS_TMP_SCHEMA);
+
+    test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testCreateWhenTemporaryTableExistsCaseInsensitive() throws Exception {
-    String temporaryTableName = "temporary_table_exists_without_schema";
-    try {
-      test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
-      test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName.toUpperCase());
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", temporaryTableName.toUpperCase(), DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    String temporaryTableName = "temporary_table_exists_case_insensitive";
+    test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
+
+    expectUserRemoteExceptionWithTableExistsMessage(temporaryTableName.toUpperCase(), DFS_TMP_SCHEMA);
+
+    test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName.toUpperCase());
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testCreateWhenTemporaryTableExistsWithSchema() throws Exception {
     String temporaryTableName = "temporary_table_exists_with_schema";
-    try {
-      test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
-      test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
+
+    expectUserRemoteExceptionWithTableExistsMessage(temporaryTableName, DFS_TMP_SCHEMA);
+
+    test("create TEMPORARY table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testCreateWhenPersistentTableExists() throws Exception {
     String persistentTableName = "persistent_table_exists";
-    try {
-      test("create table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, persistentTableName);
-      test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", persistentTableName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", persistentTableName, DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    test("create table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, persistentTableName);
+
+    expectUserRemoteExceptionWithTableExistsMessage(persistentTableName, DFS_TMP_SCHEMA);
+
+    test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", persistentTableName);
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testCreateWhenViewExists() throws Exception {
     String viewName = "view_exists";
-    try {
-      test("create view %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, viewName);
-      test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", viewName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", viewName, DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    test("create view %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, viewName);
+
+    expectUserRemoteExceptionWithTableExistsMessage(viewName, DFS_TMP_SCHEMA);
+
+    test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", viewName);
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testCreatePersistentTableWhenTemporaryTableExists() throws Exception {
     String temporaryTableName = "temporary_table_exists_before_persistent";
-    try {
-      test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
-      test("create table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: A table or view with given name [%s]" +
-              " already exists in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
+
+    expectUserRemoteExceptionWithTableExistsMessage(temporaryTableName, DFS_TMP_SCHEMA);
+
+    test("create table %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testCreateViewWhenTemporaryTableExists() throws Exception {
     String temporaryTableName = "temporary_table_exists_before_view";
-    try {
-      test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
-      test("create view %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: A non-view table with given name [%s] already exists in schema [%s]",
-          temporaryTableName, DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
+
+    expectUserRemoteExceptionWithMessage(String.format(
+      "VALIDATION ERROR: A non-view table with given name [%s] already exists in schema [%s]",
+      temporaryTableName, DFS_TMP_SCHEMA));
+
+    test("create view %s.%s as select 'A' as c1 from (values(1))", DFS_TMP_SCHEMA, temporaryTableName);
   }
 
   @Test
@@ -358,21 +290,18 @@ public class TestCTTAS extends BaseTestQuery {
         .go();
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testTemporaryTablesInViewDefinitions() throws Exception {
     String temporaryTableName = "temporary_table_for_view_definition";
     test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
 
-    try {
-      test("create view %s.view_with_temp_table as select * from %s", DFS_TMP_SCHEMA, temporaryTableName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: Temporary tables usage is disallowed. Used temporary table name: [%s]", temporaryTableName)));
-      throw e;
-    }
+    expectUserRemoteExceptionWithMessage(String.format(
+      "VALIDATION ERROR: Temporary tables usage is disallowed. Used temporary table name: [%s]", temporaryTableName));
+
+    test("create view %s.view_with_temp_table as select * from %s", DFS_TMP_SCHEMA, temporaryTableName);
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testTemporaryTablesInViewExpansionLogic() throws Exception {
     String tableName = "table_for_expansion_logic_test";
     String viewName = "view_for_expansion_logic_test";
@@ -389,73 +318,48 @@ public class TestCTTAS extends BaseTestQuery {
 
     test("drop table %s", tableName);
     test("create temporary table %s as select 'TEMP' as c1 from (values(1))", tableName);
-    try {
-      test("select * from %s", viewName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-          "VALIDATION ERROR: Temporary tables usage is disallowed. Used temporary table name: [%s]", tableName)));
-      throw e;
-    }
+
+    expectUserRemoteExceptionWithMessage(String.format(
+      "VALIDATION ERROR: Temporary tables usage is disallowed. Used temporary table name: [%s]", tableName));
+
+    test("select * from %s", viewName);
   }
 
   @Test // DRILL-5952
   public void testCreateTemporaryTableIfNotExistsWhenTableWithSameNameAlreadyExists() throws Exception{
     final String newTblName = "createTemporaryTableIfNotExistsWhenATableWithSameNameAlreadyExists";
+    test("CREATE TEMPORARY TABLE %s.%s AS SELECT * from cp.`region.json`", DFS_TMP_SCHEMA, newTblName);
 
-    try {
-      String ctasQuery = String.format("CREATE TEMPORARY TABLE %s.%s AS SELECT * from cp.`region.json`", DFS_TMP_SCHEMA, newTblName);
-
-      test(ctasQuery);
-
-      ctasQuery =
-        String.format("CREATE TEMPORARY TABLE IF NOT EXISTS %s AS SELECT * FROM cp.`employee.json`", newTblName);
+    testBuilder()
+      .sqlQuery("CREATE TEMPORARY TABLE IF NOT EXISTS %s AS SELECT * FROM cp.`employee.json`", newTblName)
+      .unOrdered()
+      .baselineColumns("ok", "summary")
+      .baselineValues(false, String.format("A table or view with given name [%s] already exists in schema [%s]", newTblName, DFS_TMP_SCHEMA))
+      .go();
 
-      testBuilder()
-        .sqlQuery(ctasQuery)
-        .unOrdered()
-        .baselineColumns("ok", "summary")
-        .baselineValues(false, String.format("A table or view with given name [%s] already exists in schema [%s]", newTblName, DFS_TMP_SCHEMA))
-        .go();
-    } finally {
-      test(String.format("DROP TABLE IF EXISTS %s.%s", DFS_TMP_SCHEMA, newTblName));
-    }
+    test("DROP TABLE IF EXISTS %s.%s", DFS_TMP_SCHEMA, newTblName);
   }
 
   @Test // DRILL-5952
   public void testCreateTemporaryTableIfNotExistsWhenViewWithSameNameAlreadyExists() throws Exception{
     final String newTblName = "createTemporaryTableIfNotExistsWhenAViewWithSameNameAlreadyExists";
+    test("CREATE VIEW %s.%s AS SELECT * from cp.`region.json`", DFS_TMP_SCHEMA, newTblName);
 
-    try {
-      String ctasQuery = String.format("CREATE VIEW %s.%s AS SELECT * from cp.`region.json`", DFS_TMP_SCHEMA, newTblName);
-
-      test(ctasQuery);
-
-      ctasQuery =
-        String.format("CREATE TEMPORARY TABLE IF NOT EXISTS %s.%s AS SELECT * FROM cp.`employee.json`", DFS_TMP_SCHEMA, newTblName);
+    testBuilder()
+      .sqlQuery("CREATE TEMPORARY TABLE IF NOT EXISTS %s.%s AS SELECT * FROM cp.`employee.json`", DFS_TMP_SCHEMA, newTblName)
+      .unOrdered()
+      .baselineColumns("ok", "summary")
+      .baselineValues(false, String.format("A table or view with given name [%s] already exists in schema [%s]", newTblName, DFS_TMP_SCHEMA))
+      .go();
 
-      testBuilder()
-        .sqlQuery(ctasQuery)
-        .unOrdered()
-        .baselineColumns("ok", "summary")
-        .baselineValues(false, String.format("A table or view with given name [%s] already exists in schema [%s]", newTblName, DFS_TMP_SCHEMA))
-        .go();
-    } finally {
-      test(String.format("DROP VIEW IF EXISTS %s.%s", DFS_TMP_SCHEMA, newTblName));
-    }
+    test("DROP VIEW IF EXISTS %s.%s", DFS_TMP_SCHEMA, newTblName);
   }
 
   @Test // DRILL-5952
   public void testCreateTemporaryTableIfNotExistsWhenTableWithSameNameDoesNotExist() throws Exception{
     final String newTblName = "createTemporaryTableIfNotExistsWhenATableWithSameNameDoesNotExist";
-
-    try {
-      String ctasQuery = String.format("CREATE TEMPORARY TABLE IF NOT EXISTS %s.%s AS SELECT * FROM cp.`employee.json`", DFS_TMP_SCHEMA, newTblName);
-
-      test(ctasQuery);
-
-    } finally {
-      test(String.format("DROP TABLE IF EXISTS %s.%s", DFS_TMP_SCHEMA, newTblName));
-    }
+    test("CREATE TEMPORARY TABLE IF NOT EXISTS %s.%s AS SELECT * FROM cp.`employee.json`", DFS_TMP_SCHEMA, newTblName);
+    test("DROP TABLE IF EXISTS %s.%s", DFS_TMP_SCHEMA, newTblName);
   }
 
   @Test
@@ -498,47 +402,26 @@ public class TestCTTAS extends BaseTestQuery {
         .go();
   }
 
-  @Test(expected = UserRemoteException.class)
+  @Test
   public void testDropTemporaryTableAsViewWithException() throws Exception {
     String temporaryTableName = "temporary_table_to_drop_like_view_with_exception";
     test("create TEMPORARY table %s as select 'A' as c1 from (values(1))", temporaryTableName);
 
-    try {
-      test("drop view %s.%s", DFS_TMP_SCHEMA, temporaryTableName);
-    } catch (UserRemoteException e) {
-      assertThat(e.getMessage(), containsString(String.format(
-              "VALIDATION ERROR: Unknown view [%s] in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA)));
-      throw e;
-    }
+    expectUserRemoteExceptionWithMessage(String.format(
+      "VALIDATION ERROR: Unknown view [%s] in schema [%s]", temporaryTableName, DFS_TMP_SCHEMA));
+
+    test("drop view %s.%s", DFS_TMP_SCHEMA, temporaryTableName);
   }
 
-  private void checkPermission(String tmpTableName) throws IOException {
-    List<Path> matchingPath = findTemporaryTableLocation(tmpTableName);
-    assertEquals("Only one directory should match temporary table name " + tmpTableName, 1, matchingPath.size());
-    Path tmpTablePath = matchingPath.get(0);
-    assertEquals("Directory permission should match",
-        expectedFolderPermission, fs.getFileStatus(tmpTablePath).getPermission());
-    RemoteIterator<LocatedFileStatus> fileIterator = fs.listFiles(tmpTablePath, false);
-    while (fileIterator.hasNext()) {
-      assertEquals("File permission should match", expectedFilePermission, fileIterator.next().getPermission());
-    }
+  private void expectUserRemoteExceptionWithMessage(String message) {
+    thrown.expect(UserRemoteException.class);
+    thrown.expectMessage(containsString(message));
   }
 
-  private List<Path> findTemporaryTableLocation(String tableName) throws IOException {
-    Path sessionTempLocation = new Path(dirTestWatcher.getDfsTestTmpDir().getAbsolutePath(), session_id.toString());
-    assertTrue("Session temporary location must exist", fs.exists(sessionTempLocation));
-    assertEquals("Session temporary location permission should match",
-        expectedFolderPermission, fs.getFileStatus(sessionTempLocation).getPermission());
-    String tableUUID =  UUID.nameUUIDFromBytes(tableName.getBytes()).toString();
-
-    RemoteIterator<LocatedFileStatus> pathList = fs.listLocatedStatus(sessionTempLocation);
-    List<Path> matchingPath = Lists.newArrayList();
-    while (pathList.hasNext()) {
-      LocatedFileStatus path = pathList.next();
-      if (path.isDirectory() && path.getPath().getName().equals(tableUUID)) {
-        matchingPath.add(path.getPath());
-      }
-    }
-    return matchingPath;
+  private void expectUserRemoteExceptionWithTableExistsMessage(String tableName, String schemaName) {
+    expectUserRemoteExceptionWithMessage(String.format(
+      "VALIDATION ERROR: A table or view with given name [%s]" +
+        " already exists in schema [%s]", tableName, schemaName));
   }
+
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java
index c2add41..307faab 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java
@@ -28,10 +28,7 @@ import org.apache.drill.exec.vector.UInt4Vector;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.runners.MockitoJUnitRunner;
 
-@RunWith(MockitoJUnitRunner.class)
 public class TestEmptyPopulation extends BaseTestQuery {
 
   private UInt4Vector offsets;
diff --git a/pom.xml b/pom.xml
index 4272978..151f208 100644
--- a/pom.xml
+++ b/pom.xml
@@ -798,9 +798,9 @@
     <!-- Test Dependencies -->
     <dependency>
       <!-- JMockit needs to be on class path before JUnit. -->
-      <groupId>com.googlecode.jmockit</groupId>
+      <groupId>org.jmockit</groupId>
       <artifactId>jmockit</artifactId>
-      <version>1.3</version>
+      <version>1.39</version>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -814,7 +814,8 @@
            long as Mockito _contains_ older Hamcrest classes.  See DRILL-2130. -->
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>
-      <version>1.9.5</version>
+      <version>2.18.3</version>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>ch.qos.logback</groupId>

-- 
To stop receiving notification emails like this one, please contact
amansinha@apache.org.

[drill] 02/02: DRILL-6272: Refactor dynamic UDFs and function initializer tests to generate needed binary and source jars at runtime

Posted by am...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

amansinha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit ca90229b6deea40282927f8ab5c07715a4e18620
Author: Arina Ielchiieva <ar...@gmail.com>
AuthorDate: Sat May 5 15:32:07 2018 +0300

    DRILL-6272: Refactor dynamic UDFs and function initializer tests to generate needed binary and source jars at runtime
    
    close apache/drill#1225
---
 exec/java-exec/pom.xml                             |  48 ++
 .../exec/expr/fn/FunctionInitializerTest.java      |  54 +--
 .../apache/drill/exec/udf/dynamic/JarBuilder.java  |  96 ++++
 .../udf/dynamic}/TestDynamicUDFSupport.java        | 523 ++++++++++-----------
 .../java-exec/src/test/resources/drill-udf/pom.xml |  92 ++++
 .../drill/udf/dynamic/CustomAbsFunction.java       |  63 +++
 .../drill/udf/dynamic/CustomLogFunction.java       |  58 +++
 .../udf/dynamic/CustomLowerDummyFunction.java      |  58 +++
 .../drill/udf/dynamic/CustomLowerFunction.java     |  64 +++
 .../drill/udf/dynamic/CustomLowerFunctionV2.java   |  64 +++
 .../drill/udf/dynamic/CustomUpperFunction.java     |  64 +++
 .../apache/drill/udf/dynamic/LowerFunction.java    |  64 +++
 .../drill-udf/src/main/resources/drill-module.conf |   1 +
 .../test/resources/jars/DrillUDF-1.0-sources.jar   | Bin 1892 -> 0 bytes
 .../src/test/resources/jars/DrillUDF-1.0.jar       | Bin 3146 -> 0 bytes
 .../test/resources/jars/DrillUDF-2.0-sources.jar   | Bin 1891 -> 0 bytes
 .../src/test/resources/jars/DrillUDF-2.0.jar       | Bin 3142 -> 0 bytes
 .../jars/DrillUDF-overloading-1.0-sources.jar      | Bin 3473 -> 0 bytes
 .../resources/jars/DrillUDF-overloading-1.0.jar    | Bin 5779 -> 0 bytes
 .../resources/jars/DrillUDF_Copy-1.0-sources.jar   | Bin 1892 -> 0 bytes
 .../src/test/resources/jars/DrillUDF_Copy-1.0.jar  | Bin 3185 -> 0 bytes
 .../jars/DrillUDF_DupFunc-1.0-sources.jar          | Bin 1888 -> 0 bytes
 .../test/resources/jars/DrillUDF_DupFunc-1.0.jar   | Bin 3201 -> 0 bytes
 .../resources/jars/DrillUDF_Empty-1.0-sources.jar  | Bin 536 -> 0 bytes
 .../src/test/resources/jars/DrillUDF_Empty-1.0.jar | Bin 1863 -> 0 bytes
 .../jars/DrillUDF_NoMarkerFile-1.0-sources.jar     | Bin 1715 -> 0 bytes
 .../resources/jars/DrillUDF_NoMarkerFile-1.0.jar   | Bin 3084 -> 0 bytes
 .../resources/jars/v2/DrillUDF-1.0-sources.jar     | Bin 1899 -> 0 bytes
 .../src/test/resources/jars/v2/DrillUDF-1.0.jar    | Bin 3215 -> 0 bytes
 29 files changed, 948 insertions(+), 301 deletions(-)

diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index cbc3a02..345e240 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -584,6 +584,54 @@
       <artifactId>netty-tcnative</artifactId>
       <classifier>${netty.tcnative.classifier}</classifier>
     </dependency>
+    <dependency>
+      <groupId>org.apache.maven</groupId>
+      <artifactId>maven-embedder</artifactId>
+      <version>3.5.3</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven</groupId>
+      <artifactId>maven-compat</artifactId>
+      <version>3.5.3</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.aether</groupId>
+      <artifactId>aether-connector-basic</artifactId>
+      <version>1.1.0</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.aether</groupId>
+      <artifactId>aether-transport-wagon</artifactId>
+      <version>1.1.0</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven.wagon</groupId>
+      <artifactId>wagon-http</artifactId>
+      <version>3.0.0</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-logging</groupId>
+          <artifactId>commons-logging</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.maven.wagon</groupId>
+      <artifactId>wagon-provider-api</artifactId>
+      <version>3.0.0</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-logging</groupId>
+          <artifactId>commons-logging</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
   <profiles>
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java
index 7c10bd3..2ecb8a0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/fn/FunctionInitializerTest.java
@@ -19,20 +19,20 @@ package org.apache.drill.exec.expr.fn;
 
 import com.google.common.collect.Lists;
 import org.apache.drill.categories.SqlFunctionTest;
-import org.apache.drill.test.TestTools;
+import org.apache.drill.exec.udf.dynamic.JarBuilder;
 import org.apache.drill.exec.util.JarUtil;
 import org.codehaus.janino.Java.CompilationUnit;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.runners.MockitoJUnitRunner;
-import org.mockito.stubbing.Answer;
+import org.junit.rules.TemporaryFolder;
 
+import java.io.File;
+import java.io.IOException;
 import java.net.URL;
 import java.net.URLClassLoader;
-import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -45,25 +45,27 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.spy;
 
-@RunWith(MockitoJUnitRunner.class)
 @Category(SqlFunctionTest.class)
 public class FunctionInitializerTest {
 
-  private static final String CLASS_NAME = "com.drill.udf.CustomLowerFunction";
+  @ClassRule
+  public static final TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+  private static final String CLASS_NAME = "org.apache.drill.udf.dynamic.CustomLowerFunction";
   private static URLClassLoader classLoader;
 
   @BeforeClass
   public static void init() throws Exception {
-    Path jars = TestTools.WORKING_PATH
-      .resolve(TestTools.TEST_RESOURCES_REL)
-      .resolve("jars");
-    String binaryName = "DrillUDF-1.0.jar";
-    String sourceName = JarUtil.getSourceName(binaryName);
-    URL[] urls = {jars.resolve(binaryName).toUri().toURL(), jars.resolve(sourceName).toUri().toURL()};
+    File buildDirectory = temporaryFolder.getRoot();
+    String binaryName = "drill-custom-lower";
+
+    JarBuilder jarBuilder = new JarBuilder("src/test/resources/drill-udf");
+    String binaryJar = jarBuilder.build(binaryName, buildDirectory.getAbsolutePath(), "**/CustomLowerFunction.java", null);
+
+    URL[] urls = {
+      Paths.get(buildDirectory.getPath(), binaryJar).toUri().toURL(),
+      Paths.get(buildDirectory.getPath(), JarUtil.getSourceName(binaryJar)).toUri().toURL()};
     classLoader = new URLClassLoader(urls);
   }
 
@@ -94,27 +96,21 @@ public class FunctionInitializerTest {
 
   @Test
   public void testConcurrentFunctionBodyLoad() throws Exception {
-    final FunctionInitializer spyFunctionInitializer = spy(new FunctionInitializer(CLASS_NAME, classLoader));
     final AtomicInteger counter = new AtomicInteger();
-
-    doAnswer(new Answer<CompilationUnit>() {
+    final FunctionInitializer functionInitializer = new FunctionInitializer(CLASS_NAME, classLoader) {
       @Override
-      public CompilationUnit answer(InvocationOnMock invocation) throws Throwable {
+      CompilationUnit convertToCompilationUnit(Class<?> clazz) throws IOException {
         counter.incrementAndGet();
-        return (CompilationUnit) invocation.callRealMethod();
+        return super.convertToCompilationUnit(clazz);
       }
-    }).when(spyFunctionInitializer).convertToCompilationUnit(any(Class.class));
+    };
 
     int threadsNumber = 5;
     ExecutorService executor = Executors.newFixedThreadPool(threadsNumber);
 
     try {
-      List<Future<String>> results = executor.invokeAll(Collections.nCopies(threadsNumber, new Callable<String>() {
-        @Override
-        public String call() {
-          return spyFunctionInitializer.getMethod("eval");
-        }
-      }));
+      List<Future<String>> results = executor.invokeAll(Collections.nCopies(threadsNumber,
+        (Callable<String>) () -> functionInitializer.getMethod("eval")));
 
       final Set<String> uniqueResults = new HashSet<>();
       for (Future<String> result : results) {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/JarBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/JarBuilder.java
new file mode 100644
index 0000000..4861c30
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/JarBuilder.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.udf.dynamic;
+
+import org.apache.maven.cli.MavenCli;
+import org.apache.maven.cli.logging.Slf4jLogger;
+import org.codehaus.plexus.DefaultPlexusContainer;
+import org.codehaus.plexus.PlexusContainer;
+import org.codehaus.plexus.logging.BaseLoggerManager;
+
+import java.util.LinkedList;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class JarBuilder {
+
+  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JarBuilder.class);
+  private static final String MAVEN_MULTI_MODULE_PROJECT_DIRECTORY = "maven.multiModuleProjectDirectory";
+
+  private final MavenCli cli;
+  private final String projectDirectory;
+
+  public JarBuilder(String projectDirectory) {
+    this.cli = new MavenCli() {
+      @Override
+      protected void customizeContainer(PlexusContainer container) {
+        ((DefaultPlexusContainer) container).setLoggerManager(new BaseLoggerManager() {
+          @Override
+          protected org.codehaus.plexus.logging.Logger createLogger(String s) {
+            return new Slf4jLogger(logger);
+          }
+        });
+      }
+    };
+    this.projectDirectory = projectDirectory;
+  }
+
+  /**
+   * Builds jars using embedded maven in provided build directory.
+   * Includes files / resources based on the given pattern, otherwise falls back to the defaults provided in pom.xml.
+   * Checks if build exit code is 0, i.e. build was successful.
+   *
+   * @param jarName jar name
+   * @param buildDirectory build directory
+   * @param includeFiles pattern indicating which files should be included
+   * @param includeResources pattern indicating which resources should be included
+   *
+   * @return binary jar name with jar extension (my-jar.jar)
+   */
+  public String build(String jarName, String buildDirectory, String includeFiles, String includeResources) {
+    String originalPropertyValue = System.setProperty(MAVEN_MULTI_MODULE_PROJECT_DIRECTORY, projectDirectory);
+    try {
+      List<String> params = new LinkedList<>();
+      params.add("clean");
+      params.add("package");
+      params.add("-DskipTests");
+      // uncomment to build with current Drill version
+      // params.add("-Ddrill.version=" + DrillVersionInfo.getVersion());
+      params.add("-Djar.finalName=" + jarName);
+      params.add("-Dcustom.buildDirectory=" + buildDirectory);
+      if (includeFiles != null) {
+        params.add("-Dinclude.files=" + includeFiles);
+      }
+      if (includeResources != null) {
+        params.add("-Dinclude.resources=" + includeResources);
+      }
+      int result = cli.doMain(params.toArray(new String[params.size()]), projectDirectory, System.out, System.err);
+      assertEquals("Build should be successful.", 0, result);
+      return jarName + ".jar";
+    } finally {
+      if (originalPropertyValue != null) {
+        System.setProperty(MAVEN_MULTI_MODULE_PROJECT_DIRECTORY, originalPropertyValue);
+      } else {
+        System.clearProperty(MAVEN_MULTI_MODULE_PROJECT_DIRECTORY);
+      }
+    }
+  }
+
+}
+
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java
similarity index 65%
rename from exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
rename to exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java
index 41da123..047026d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill;
+package org.apache.drill.exec.udf.dynamic;
 
 import com.google.common.collect.Lists;
 import org.apache.commons.io.FileUtils;
@@ -26,7 +26,6 @@ import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.common.config.CommonConstants;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.test.TestTools;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.VersionMismatchException;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
@@ -40,16 +39,13 @@ import org.apache.drill.exec.util.JarUtil;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.test.TestBuilder;
 import org.apache.hadoop.fs.FileSystem;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestWatcher;
-import org.junit.runner.Description;
-import org.junit.runner.RunWith;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.runners.MockitoJUnitRunner;
-import org.mockito.stubbing.Answer;
+import org.junit.rules.ExpectedException;
 
 import java.io.File;
 import java.io.IOException;
@@ -66,9 +62,10 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.anyString;
+import static org.junit.Assert.fail;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyLong;
+import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.reset;
@@ -76,23 +73,37 @@ import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 
-@RunWith(MockitoJUnitRunner.class)
 @Category({SlowTest.class, SqlFunctionTest.class})
 public class TestDynamicUDFSupport extends BaseTestQuery {
 
-  private static final Path jars = TestTools.WORKING_PATH
-    .resolve(TestTools.TEST_RESOURCES_REL)
-    .resolve("jars");
-  private static final String default_binary_name = "DrillUDF-1.0.jar";
-  private static final String UDF_SUB_DIR = "udf";
-  private static final String default_source_name = JarUtil.getSourceName(default_binary_name);
+  private static final String DEFAULT_JAR_NAME = "drill-custom-lower";
   private static URI fsUri;
   private static File udfDir;
+  private static File jarsDir;
+  private static File buildDirectory;
+  private static JarBuilder jarBuilder;
+  private static String defaultBinaryJar;
+  private static String defaultSourceJar;
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
 
   @BeforeClass
-  public static void setup() throws IOException {
-    udfDir = dirTestWatcher.makeSubDir(Paths.get(UDF_SUB_DIR));
+  public static void buildAndStoreDefaultJars() throws IOException {
+    jarsDir = dirTestWatcher.makeSubDir(Paths.get("jars"));
+    buildDirectory = dirTestWatcher.makeSubDir(Paths.get("drill-udf"));
+
+    jarBuilder = new JarBuilder("src/test/resources/drill-udf");
+    defaultBinaryJar = buildJars(DEFAULT_JAR_NAME, "**/CustomLowerFunction.java", null);
+    defaultSourceJar = JarUtil.getSourceName(defaultBinaryJar);
+
+    FileUtils.copyFileToDirectory(new File(buildDirectory, defaultBinaryJar), jarsDir);
+    FileUtils.copyFileToDirectory(new File(buildDirectory, defaultSourceJar), jarsDir);
+  }
 
+  @Before
+  public void setupNewDrillbit() throws Exception {
+    udfDir = dirTestWatcher.makeSubDir(Paths.get("udf"));
     Properties overrideProps = new Properties();
     overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, udfDir.getAbsolutePath());
     overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
@@ -101,29 +112,12 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     fsUri = getLocalFileSystem().getUri();
   }
 
-  @Rule
-  public final TestWatcher clearDirs = new TestWatcher() {
-    @Override
-    protected void succeeded(Description description) {
-      reset();
-    }
-
-    @Override
-    protected void failed(Throwable e, Description description) {
-      reset();
-    }
-
-    private void reset() {
-      try {
-        closeClient();
-        FileUtils.cleanDirectory(udfDir);
-        dirTestWatcher.clear();
-        setup();
-      } catch (Exception e) {
-        throw new RuntimeException(e);
-      }
-    }
-  };
+  @After
+  public void cleanup() throws Exception {
+    closeClient();
+    FileUtils.cleanDirectory(udfDir);
+    dirTestWatcher.clear();
+  }
 
   @Test
   public void testSyntax() throws Exception {
@@ -143,18 +137,26 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
   }
 
   @Test
-  public void testDisableDynamicSupport() throws Exception {
+  public void testDisableDynamicSupportCreate() throws Exception {
     try {
       test("alter system set `exec.udf.enable_dynamic_support` = false");
-      String[] actions = new String[] {"create", "drop"};
-      String query = "%s function using jar 'jar_name.jar'";
-      for (String action : actions) {
-        try {
-          test(query, action);
-        } catch (UserRemoteException e) {
-          assertThat(e.getMessage(), containsString("Dynamic UDFs support is disabled."));
-        }
-      }
+      String query = "create function using jar 'jar_name.jar'";
+      thrown.expect(UserRemoteException.class);
+      thrown.expectMessage(containsString("Dynamic UDFs support is disabled."));
+      test(query);
+    } finally {
+      test("alter system reset `exec.udf.enable_dynamic_support`");
+    }
+  }
+
+  @Test
+  public void testDisableDynamicSupportDrop() throws Exception {
+    try {
+      test("alter system set `exec.udf.enable_dynamic_support` = false");
+      String query = "drop function using jar 'jar_name.jar'";
+      thrown.expect(UserRemoteException.class);
+      thrown.expectMessage(containsString("Dynamic UDFs support is disabled."));
+      test(query);
     } finally {
       test("alter system reset `exec.udf.enable_dynamic_support`");
     }
@@ -162,13 +164,13 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testAbsentBinaryInStaging() throws Exception {
-    final Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
+    Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
 
     String summary = String.format("File %s does not exist on file system %s",
-        staging.resolve(default_binary_name).toUri().getPath(), fsUri);
+        staging.resolve(defaultBinaryJar).toUri().getPath(), fsUri);
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", default_binary_name)
+        .sqlQuery("create function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, summary)
@@ -177,15 +179,14 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testAbsentSourceInStaging() throws Exception {
-    final Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
-
-    copyJar(jars, staging, default_binary_name);
+    Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
+    copyJar(jarsDir.toPath(), staging, defaultBinaryJar);
 
     String summary = String.format("File %s does not exist on file system %s",
-        staging.resolve(default_source_name).toUri().getPath(), fsUri);
+        staging.resolve(defaultSourceJar).toUri().getPath(), fsUri);
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", default_binary_name)
+        .sqlQuery("create function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, summary)
@@ -194,32 +195,32 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testJarWithoutMarkerFile() throws Exception {
-    String jarWithNoMarkerFile = "DrillUDF_NoMarkerFile-1.0.jar";
-    copyJarsToStagingArea(jarWithNoMarkerFile, JarUtil.getSourceName(jarWithNoMarkerFile));
+    String jarName = "drill-no-marker";
+    String jar = buildAndCopyJarsToStagingArea(jarName, null, "**/dummy.conf");
 
     String summary = "Marker file %s is missing in %s";
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", jarWithNoMarkerFile)
+        .sqlQuery("create function using jar '%s'", jar)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, String.format(summary,
-            CommonConstants.DRILL_JAR_MARKER_FILE_RESOURCE_PATHNAME, jarWithNoMarkerFile))
+            CommonConstants.DRILL_JAR_MARKER_FILE_RESOURCE_PATHNAME, jar))
         .go();
   }
 
   @Test
   public void testJarWithoutFunctions() throws Exception {
-    String jarWithNoFunctions = "DrillUDF_Empty-1.0.jar";
-    copyJarsToStagingArea(jarWithNoFunctions, JarUtil.getSourceName(jarWithNoFunctions));
+    String jarName = "drill-no-functions";
+    String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomLowerDummyFunction.java", null);
 
     String summary = "Jar %s does not contain functions";
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", jarWithNoFunctions)
+        .sqlQuery("create function using jar '%s'", jar)
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(false, String.format(summary, jarWithNoFunctions))
+        .baselineValues(false, String.format(summary, jar))
         .go();
   }
 
@@ -231,10 +232,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
         "[custom_lower(VARCHAR-REQUIRED)]";
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", default_binary_name)
+        .sqlQuery("create function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(true, String.format(summary, default_binary_name))
+        .baselineValues(true, String.format(summary, defaultBinaryJar))
         .go();
 
     RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
@@ -243,79 +244,81 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     assertFalse("Staging area should be empty", fs.listFiles(remoteFunctionRegistry.getStagingArea(), false).hasNext());
     assertFalse("Temporary area should be empty", fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
 
-    final Path path = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    Path path = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
 
     assertTrue("Binary should be present in registry area",
-      path.resolve(default_binary_name).toFile().exists());
+      path.resolve(defaultBinaryJar).toFile().exists());
     assertTrue("Source should be present in registry area",
-      path.resolve(default_source_name).toFile().exists());
+      path.resolve(defaultSourceJar).toFile().exists());
 
     Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
     assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
-    assertEquals(registry.getJar(0).getName(), default_binary_name);
+    assertEquals(registry.getJar(0).getName(), defaultBinaryJar);
   }
 
   @Test
   public void testDuplicatedJarInRemoteRegistry() throws Exception {
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
+    test("create function using jar '%s'", defaultBinaryJar);
     copyDefaultJarsToStagingArea();
 
     String summary = "Jar with %s name has been already registered";
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", default_binary_name)
+        .sqlQuery("create function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(false, String.format(summary, default_binary_name))
+        .baselineValues(false, String.format(summary, defaultBinaryJar))
         .go();
   }
 
   @Test
   public void testDuplicatedJarInLocalRegistry() throws Exception {
-    copyDefaultJarsToStagingArea();
+    String jarName = "drill-custom-upper";
+    String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomUpperFunction.java", null);
 
-    test("create function using jar '%s'", default_binary_name);
-    test("select custom_lower('A') from (values(1))");
+    test("create function using jar '%s'", jar);
+    test("select custom_upper('A') from (values(1))");
 
-    copyDefaultJarsToStagingArea();
+    copyJarsToStagingArea(buildDirectory.toPath(), jar, JarUtil.getSourceName(jar));
 
     String summary = "Jar with %s name has been already registered";
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", default_binary_name)
+        .sqlQuery("create function using jar '%s'", jar)
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(false, String.format(summary, default_binary_name))
+        .baselineValues(false, String.format(summary, jar))
         .go();
   }
 
   @Test
   public void testDuplicatedFunctionsInRemoteRegistry() throws Exception {
-    String jarWithDuplicate = "DrillUDF_Copy-1.0.jar";
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
-    copyJarsToStagingArea(jarWithDuplicate, JarUtil.getSourceName(jarWithDuplicate));
+    test("create function using jar '%s'", defaultBinaryJar);
+
+    String jarName = "drill-custom-lower-copy";
+    String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomLowerFunction.java", null);
 
     String summary = "Found duplicated function in %s: custom_lower(VARCHAR-REQUIRED)";
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", jarWithDuplicate)
+        .sqlQuery("create function using jar '%s'", jar)
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(false, String.format(summary, default_binary_name))
+        .baselineValues(false, String.format(summary, defaultBinaryJar))
         .go();
   }
 
   @Test
   public void testDuplicatedFunctionsInLocalRegistry() throws Exception {
-    String jarWithDuplicate = "DrillUDF_DupFunc-1.0.jar";
-    copyJarsToStagingArea(jarWithDuplicate, JarUtil.getSourceName(jarWithDuplicate));
+    String jarName = "drill-lower";
+    String jar = buildAndCopyJarsToStagingArea(jarName, "**/LowerFunction.java", null);
 
     String summary = "Found duplicated function in %s: lower(VARCHAR-REQUIRED)";
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", jarWithDuplicate)
+        .sqlQuery("create function using jar '%s'", jar)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, String.format(summary, LocalFunctionRegistry.BUILT_IN))
@@ -324,10 +327,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testSuccessfulRegistrationAfterSeveralRetryAttempts() throws Exception {
-    final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
-    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
-    final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
-    final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+    RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+    Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
 
     copyDefaultJarsToStagingArea();
 
@@ -340,10 +343,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
             "[custom_lower(VARCHAR-REQUIRED)]";
 
     testBuilder()
-            .sqlQuery("create function using jar '%s'", default_binary_name)
+            .sqlQuery("create function using jar '%s'", defaultBinaryJar)
             .unOrdered()
             .baselineColumns("ok", "summary")
-            .baselineValues(true, String.format(summary, default_binary_name))
+            .baselineValues(true, String.format(summary, defaultBinaryJar))
             .go();
 
     verify(remoteFunctionRegistry, times(3))
@@ -353,20 +356,20 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
 
     assertTrue("Binary should be present in registry area",
-      registryPath.resolve(default_binary_name).toFile().exists());
+      registryPath.resolve(defaultBinaryJar).toFile().exists());
     assertTrue("Source should be present in registry area",
-      registryPath.resolve(default_source_name).toFile().exists());
+      registryPath.resolve(defaultSourceJar).toFile().exists());
 
     Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
     assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
-    assertEquals(registry.getJar(0).getName(), default_binary_name);
+    assertEquals(registry.getJar(0).getName(), defaultBinaryJar);
   }
 
   @Test
   public void testSuccessfulUnregistrationAfterSeveralRetryAttempts() throws Exception {
     RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
+    test("create function using jar '%s'", defaultBinaryJar);
 
     reset(remoteFunctionRegistry);
     doThrow(new VersionMismatchException("Version mismatch detected", 1))
@@ -378,10 +381,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
             "[custom_lower(VARCHAR-REQUIRED)]";
 
     testBuilder()
-            .sqlQuery("drop function using jar '%s'", default_binary_name)
+            .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
             .unOrdered()
             .baselineColumns("ok", "summary")
-            .baselineValues(true, String.format(summary, default_binary_name))
+            .baselineValues(true, String.format(summary, defaultBinaryJar))
             .go();
 
     verify(remoteFunctionRegistry, times(3))
@@ -396,10 +399,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testExceedRetryAttemptsDuringRegistration() throws Exception {
-    final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
-    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
-    final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
-    final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+    RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+    Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
 
     copyDefaultJarsToStagingArea();
 
@@ -409,7 +412,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     String summary = "Failed to update remote function registry. Exceeded retry attempts limit.";
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", default_binary_name)
+        .sqlQuery("create function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, summary)
@@ -419,9 +422,9 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
         .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
 
     assertTrue("Binary should be present in staging area",
-            stagingPath.resolve(default_binary_name).toFile().exists());
+            stagingPath.resolve(defaultBinaryJar).toFile().exists());
     assertTrue("Source should be present in staging area",
-            stagingPath.resolve(default_source_name).toFile().exists());
+            stagingPath.resolve(defaultSourceJar).toFile().exists());
 
     assertTrue("Registry area should be empty", ArrayUtils.isEmpty(registryPath.toFile().listFiles()));
     assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
@@ -432,11 +435,11 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testExceedRetryAttemptsDuringUnregistration() throws Exception {
-    final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
-    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
 
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
+    test("create function using jar '%s'", defaultBinaryJar);
 
     reset(remoteFunctionRegistry);
     doThrow(new VersionMismatchException("Version mismatch detected", 1))
@@ -445,7 +448,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     String summary = "Failed to update remote function registry. Exceeded retry attempts limit.";
 
     testBuilder()
-        .sqlQuery("drop function using jar '%s'", default_binary_name)
+        .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, summary)
@@ -455,25 +458,23 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
         .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
 
     assertTrue("Binary should be present in registry area",
-      registryPath.resolve(default_binary_name).toFile().exists());
+      registryPath.resolve(defaultBinaryJar).toFile().exists());
     assertTrue("Source should be present in registry area",
-      registryPath.resolve(default_source_name).toFile().exists());
+      registryPath.resolve(defaultSourceJar).toFile().exists());
 
     Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
     assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
-    assertEquals(registry.getJar(0).getName(), default_binary_name);
+    assertEquals(registry.getJar(0).getName(), defaultBinaryJar);
   }
 
   @Test
   public void testLazyInit() throws Exception {
-    try {
-      test("select custom_lower('A') from (values(1))");
-    } catch (UserRemoteException e){
-      assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
-    }
+    thrown.expect(UserRemoteException.class);
+    thrown.expectMessage(containsString("No match found for function signature custom_lower(<CHARACTER>)"));
+    test("select custom_lower('A') from (values(1))");
 
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
+    test("create function using jar '%s'", defaultBinaryJar);
     testBuilder()
         .sqlQuery("select custom_lower('A') as res from (values(1))")
         .unOrdered()
@@ -485,21 +486,19 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
       getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir", true));
 
     assertTrue("Binary should exist in local udf directory",
-      localUdfDirPath.resolve(default_binary_name).toFile().exists());
+      localUdfDirPath.resolve(defaultBinaryJar).toFile().exists());
     assertTrue("Source should exist in local udf directory",
-      localUdfDirPath.resolve(default_source_name).toFile().exists());
+      localUdfDirPath.resolve(defaultSourceJar).toFile().exists());
   }
 
   @Test
   public void testLazyInitWhenDynamicUdfSupportIsDisabled() throws Exception {
-    try {
-      test("select custom_lower('A') from (values(1))");
-    } catch (UserRemoteException e){
-      assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
-    }
+    thrown.expect(UserRemoteException.class);
+    thrown.expectMessage(containsString("No match found for function signature custom_lower(<CHARACTER>)"));
+    test("select custom_lower('A') from (values(1))");
 
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
+    test("create function using jar '%s'", defaultBinaryJar);
 
     try {
       testBuilder()
@@ -516,9 +515,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testOverloadedFunctionPlanningStage() throws Exception {
-    String jarName = "DrillUDF-overloading-1.0.jar";
-    copyJarsToStagingArea(jarName, JarUtil.getSourceName(jarName));
-    test("create function using jar '%s'", jarName);
+    String jarName = "drill-custom-abs";
+    String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomAbsFunction.java", null);
+
+    test("create function using jar '%s'", jar);
 
     testBuilder()
         .sqlQuery("select abs('A', 'A') as res from (values(1))")
@@ -530,9 +530,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testOverloadedFunctionExecutionStage() throws Exception {
-    String jarName = "DrillUDF-overloading-1.0.jar";
-    copyJarsToStagingArea(jarName, JarUtil.getSourceName(jarName));
-    test("create function using jar '%s'", jarName);
+    String jarName = "drill-custom-log";
+    String jar = buildAndCopyJarsToStagingArea(jarName, "**/CustomLogFunction.java", null);
+
+    test("create function using jar '%s'", jar);
 
     testBuilder()
         .sqlQuery("select log('A') as res from (values(1))")
@@ -545,67 +546,65 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
   @Test
   public void testDropFunction() throws Exception {
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
+    test("create function using jar '%s'", defaultBinaryJar);
     test("select custom_lower('A') from (values(1))");
 
     Path localUdfDirPath = hadoopToJavaPath((org.apache.hadoop.fs.Path)FieldUtils.readField(
         getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir", true));
 
     assertTrue("Binary should exist in local udf directory",
-      localUdfDirPath.resolve(default_binary_name).toFile().exists());
+      localUdfDirPath.resolve(defaultBinaryJar).toFile().exists());
     assertTrue("Source should exist in local udf directory",
-      localUdfDirPath.resolve(default_source_name).toFile().exists());
+      localUdfDirPath.resolve(defaultSourceJar).toFile().exists());
 
     String summary = "The following UDFs in jar %s have been unregistered:\n" +
         "[custom_lower(VARCHAR-REQUIRED)]";
 
     testBuilder()
-        .sqlQuery("drop function using jar '%s'", default_binary_name)
+        .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(true, String.format(summary, default_binary_name))
+        .baselineValues(true, String.format(summary, defaultBinaryJar))
         .go();
 
-    try {
-      test("select custom_lower('A') from (values(1))");
-    } catch (UserRemoteException e){
-      assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
-    }
+    thrown.expect(UserRemoteException.class);
+    thrown.expectMessage(containsString("No match found for function signature custom_lower(<CHARACTER>)"));
+    test("select custom_lower('A') from (values(1))");
 
-    final RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
-    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
+    Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
 
     assertEquals("Remote registry should be empty",
         remoteFunctionRegistry.getRegistry(new DataChangeVersion()).getJarList().size(), 0);
 
     assertFalse("Binary should not be present in registry area",
-      registryPath.resolve(default_binary_name).toFile().exists());
+      registryPath.resolve(defaultBinaryJar).toFile().exists());
     assertFalse("Source should not be present in registry area",
-      registryPath.resolve(default_source_name).toFile().exists());
+      registryPath.resolve(defaultSourceJar).toFile().exists());
 
     assertFalse("Binary should not be present in local udf directory",
-      localUdfDirPath.resolve(default_binary_name).toFile().exists());
+      localUdfDirPath.resolve(defaultBinaryJar).toFile().exists());
     assertFalse("Source should not be present in local udf directory",
-      localUdfDirPath.resolve(default_source_name).toFile().exists());
+      localUdfDirPath.resolve(defaultSourceJar).toFile().exists());
   }
 
   @Test
   public void testReRegisterTheSameJarWithDifferentContent() throws Exception {
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
+    test("create function using jar '%s'", defaultBinaryJar);
     testBuilder()
         .sqlQuery("select custom_lower('A') as res from (values(1))")
         .unOrdered()
         .baselineColumns("res")
         .baselineValues("a")
         .go();
-    test("drop function using jar '%s'", default_binary_name);
+    test("drop function using jar '%s'", defaultBinaryJar);
 
     Thread.sleep(1000);
 
-    Path src = jars.resolve("v2");
-    copyJarsToStagingArea(src, default_binary_name, default_source_name);
-    test("create function using jar '%s'", default_binary_name);
+    buildAndCopyJarsToStagingArea(DEFAULT_JAR_NAME, "**/CustomLowerFunctionV2.java", null);
+
+    test("create function using jar '%s'", defaultBinaryJar);
     testBuilder()
         .sqlQuery("select custom_lower('A') as res from (values(1))")
         .unOrdered()
@@ -619,36 +618,33 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     String summary = "Jar %s is not registered in remote registry";
 
     testBuilder()
-        .sqlQuery("drop function using jar '%s'", default_binary_name)
+        .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(false, String.format(summary, default_binary_name))
+        .baselineValues(false, String.format(summary, defaultBinaryJar))
         .go();
   }
 
   @Test
   public void testRegistrationFailDuringRegistryUpdate() throws Exception {
-    final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
-    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
-    final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
-    final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+    RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+    Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
 
     final String errorMessage = "Failure during remote registry update.";
-    doAnswer(new Answer<Void>() {
-      @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
-        assertTrue("Binary should be present in registry area",
-            registryPath.resolve(default_binary_name).toFile().exists());
-        assertTrue("Source should be present in registry area",
-            registryPath.resolve(default_source_name).toFile().exists());
-        throw new RuntimeException(errorMessage);
-      }
+    doAnswer(invocation -> {
+      assertTrue("Binary should be present in registry area",
+          registryPath.resolve(defaultBinaryJar).toFile().exists());
+      assertTrue("Source should be present in registry area",
+          registryPath.resolve(defaultSourceJar).toFile().exists());
+      throw new RuntimeException(errorMessage);
     }).when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
 
     copyDefaultJarsToStagingArea();
 
     testBuilder()
-        .sqlQuery("create function using jar '%s'", default_binary_name)
+        .sqlQuery("create function using jar '%s'", defaultBinaryJar)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, errorMessage)
@@ -657,8 +653,8 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     assertTrue("Registry area should be empty", ArrayUtils.isEmpty(registryPath.toFile().listFiles()));
     assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
 
-    assertTrue("Binary should be present in staging area", stagingPath.resolve(default_binary_name).toFile().exists());
-    assertTrue("Source should be present in staging area", stagingPath.resolve(default_source_name).toFile().exists());
+    assertTrue("Binary should be present in staging area", stagingPath.resolve(defaultBinaryJar).toFile().exists());
+    assertTrue("Source should be present in staging area", stagingPath.resolve(defaultSourceJar).toFile().exists());
   }
 
   @Test
@@ -668,21 +664,18 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     final CountDownLatch latch1 = new CountDownLatch(1);
     final CountDownLatch latch2 = new CountDownLatch(1);
 
-    doAnswer(new Answer<String>() {
-      @Override
-      public String answer(InvocationOnMock invocation) throws Throwable {
-        String result = (String) invocation.callRealMethod();
-        latch2.countDown();
-        latch1.await();
-        return result;
-      }
+    doAnswer(invocation -> {
+      String result = (String) invocation.callRealMethod();
+      latch2.countDown();
+      latch1.await();
+      return result;
     })
         .doCallRealMethod()
         .doCallRealMethod()
         .when(remoteFunctionRegistry).addToJars(anyString(), any(RemoteFunctionRegistry.Action.class));
 
 
-    final String query = String.format("create function using jar '%s'", default_binary_name);
+    final String query = String.format("create function using jar '%s'", defaultBinaryJar);
 
     Thread thread = new Thread(new SimpleQueryRunner(query));
     thread.start();
@@ -695,14 +688,14 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
           .sqlQuery(query)
           .unOrdered()
           .baselineColumns("ok", "summary")
-          .baselineValues(false, String.format(summary, default_binary_name))
+          .baselineValues(false, String.format(summary, defaultBinaryJar))
           .go();
 
       testBuilder()
-          .sqlQuery("drop function using jar '%s'", default_binary_name)
+          .sqlQuery("drop function using jar '%s'", defaultBinaryJar)
           .unOrdered()
           .baselineColumns("ok", "summary")
-          .baselineValues(false, String.format(summary, default_binary_name))
+          .baselineValues(false, String.format(summary, defaultBinaryJar))
           .go();
 
     } finally {
@@ -719,51 +712,45 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     final CountDownLatch latch2 = new CountDownLatch(1);
     final CountDownLatch latch3 = new CountDownLatch(1);
 
-    doAnswer(new Answer<Void>() {
-      @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
-        latch3.countDown();
-        latch1.await();
-        invocation.callRealMethod();
-        latch2.countDown();
-        return null;
-      }
-    }).doAnswer(new Answer<Void>() {
-      @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
-        latch1.countDown();
-        latch2.await();
-        invocation.callRealMethod();
-        return null;
-      }
+    doAnswer(invocation -> {
+      latch3.countDown();
+      latch1.await();
+      invocation.callRealMethod();
+      latch2.countDown();
+      return null;
+    }).doAnswer(invocation -> {
+      latch1.countDown();
+      latch2.await();
+      invocation.callRealMethod();
+      return null;
     })
         .when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
 
+    final String jar1 = defaultBinaryJar;
+    copyDefaultJarsToStagingArea();
 
-    final String jarName1 = default_binary_name;
-    final String jarName2 = "DrillUDF_Copy-1.0.jar";
-    final String query = "create function using jar '%s'";
+    final String copyJarName = "drill-custom-lower-copy";
+    final String jar2 = buildAndCopyJarsToStagingArea(copyJarName, "**/CustomLowerFunction.java", null);
 
-    copyDefaultJarsToStagingArea();
-    copyJarsToStagingArea(jarName2, JarUtil.getSourceName(jarName2));
+    final String query = "create function using jar '%s'";
 
     Thread thread1 = new Thread(new TestBuilderRunner(
         testBuilder()
-        .sqlQuery(query, jarName1)
+        .sqlQuery(query, jar1)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true,
             String.format("The following UDFs in jar %s have been registered:\n" +
-            "[custom_lower(VARCHAR-REQUIRED)]", jarName1))
+            "[custom_lower(VARCHAR-REQUIRED)]", jar1))
     ));
 
     Thread thread2 = new Thread(new TestBuilderRunner(
         testBuilder()
-            .sqlQuery(query, jarName2)
+            .sqlQuery(query, jar2)
             .unOrdered()
             .baselineColumns("ok", "summary")
             .baselineValues(false,
-                String.format("Found duplicated function in %s: custom_lower(VARCHAR-REQUIRED)", jarName1))
+                String.format("Found duplicated function in %s: custom_lower(VARCHAR-REQUIRED)", jar1))
     ));
 
     thread1.start();
@@ -778,7 +765,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     assertEquals("Remote registry version should match", 1, version.getVersion());
     List<Jar> jarList = registry.getJarList();
     assertEquals("Only one jar should be registered", 1, jarList.size());
-    assertEquals("Jar name should match", jarName1, jarList.get(0).getName());
+    assertEquals("Jar name should match", jar1, jarList.get(0).getName());
 
     verify(remoteFunctionRegistry, times(2)).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
   }
@@ -789,43 +776,40 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     final CountDownLatch latch1 = new CountDownLatch(1);
     final CountDownLatch latch2 = new CountDownLatch(2);
 
-    doAnswer(new Answer<Void>() {
-      @Override
-      public Void answer(InvocationOnMock invocation) throws Throwable {
-        latch2.countDown();
-        latch1.await();
-        invocation.callRealMethod();
-        return null;
-      }
+    doAnswer(invocation -> {
+      latch2.countDown();
+      latch1.await();
+      invocation.callRealMethod();
+      return null;
     })
         .when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
 
-    final String jarName1 = default_binary_name;
-    final String jarName2 = "DrillUDF-2.0.jar";
-    final String query = "create function using jar '%s'";
-
+    final String jar1 = defaultBinaryJar;
     copyDefaultJarsToStagingArea();
-    copyJarsToStagingArea(jarName2, JarUtil.getSourceName(jarName2));
 
+    final String upperJarName = "drill-custom-upper";
+    final String jar2 = buildAndCopyJarsToStagingArea(upperJarName, "**/CustomUpperFunction.java", null);
+
+    final String query = "create function using jar '%s'";
 
     Thread thread1 = new Thread(new TestBuilderRunner(
         testBuilder()
-            .sqlQuery(query, jarName1)
+            .sqlQuery(query, jar1)
             .unOrdered()
             .baselineColumns("ok", "summary")
             .baselineValues(true,
                 String.format("The following UDFs in jar %s have been registered:\n" +
-                    "[custom_lower(VARCHAR-REQUIRED)]", jarName1))
+                    "[custom_lower(VARCHAR-REQUIRED)]", jar1))
     ));
 
 
     Thread thread2 = new Thread(new TestBuilderRunner(
         testBuilder()
-            .sqlQuery(query, jarName2)
+            .sqlQuery(query, jar2)
             .unOrdered()
             .baselineColumns("ok", "summary")
             .baselineValues(true, String.format("The following UDFs in jar %s have been registered:\n" +
-                "[custom_upper(VARCHAR-REQUIRED)]", jarName2))
+                "[custom_upper(VARCHAR-REQUIRED)]", jar2))
     ));
 
     thread1.start();
@@ -842,7 +826,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     assertEquals("Remote registry version should match", 2, version.getVersion());
 
     List<Jar> actualJars = registry.getJarList();
-    List<String> expectedJars = Lists.newArrayList(jarName1, jarName2);
+    List<String> expectedJars = Lists.newArrayList(jar1, jar2);
 
     assertEquals("Only one jar should be registered", 2, actualJars.size());
     for (Jar jar : actualJars) {
@@ -856,32 +840,26 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
   public void testLazyInitConcurrent() throws Exception {
     FunctionImplementationRegistry functionImplementationRegistry = spyFunctionImplementationRegistry();
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
+    test("create function using jar '%s'", defaultBinaryJar);
 
     final CountDownLatch latch1 = new CountDownLatch(1);
     final CountDownLatch latch2 = new CountDownLatch(1);
 
     final String query = "select custom_lower('A') from (values(1))";
 
-    doAnswer(new Answer<Boolean>() {
-      @Override
-      public Boolean answer(InvocationOnMock invocation) throws Throwable {
-        latch1.await();
-        boolean result = (boolean) invocation.callRealMethod();
-        assertTrue("syncWithRemoteRegistry() should return true", result);
-        latch2.countDown();
-        return true;
-      }
+    doAnswer(invocation -> {
+      latch1.await();
+      boolean result = (boolean) invocation.callRealMethod();
+      assertTrue("syncWithRemoteRegistry() should return true", result);
+      latch2.countDown();
+      return true;
     })
-        .doAnswer(new Answer() {
-          @Override
-          public Boolean answer(InvocationOnMock invocation) throws Throwable {
-            latch1.countDown();
-            latch2.await();
-            boolean result = (boolean) invocation.callRealMethod();
-            assertTrue("syncWithRemoteRegistry() should return true", result);
-            return true;
-          }
+        .doAnswer(invocation -> {
+          latch1.countDown();
+          latch2.await();
+          boolean result = (boolean) invocation.callRealMethod();
+          assertTrue("syncWithRemoteRegistry() should return true", result);
+          return true;
         })
         .when(functionImplementationRegistry).syncWithRemoteRegistry(anyLong());
 
@@ -905,23 +883,17 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
   public void testLazyInitNoReload() throws Exception {
     FunctionImplementationRegistry functionImplementationRegistry = spyFunctionImplementationRegistry();
     copyDefaultJarsToStagingArea();
-    test("create function using jar '%s'", default_binary_name);
-
-    doAnswer(new Answer<Boolean>() {
-      @Override
-      public Boolean answer(InvocationOnMock invocation) throws Throwable {
-        boolean result = (boolean) invocation.callRealMethod();
-        assertTrue("syncWithRemoteRegistry() should return true", result);
-        return true;
-      }
+    test("create function using jar '%s'", defaultBinaryJar);
+
+    doAnswer(invocation -> {
+      boolean result = (boolean) invocation.callRealMethod();
+      assertTrue("syncWithRemoteRegistry() should return true", result);
+      return true;
     })
-        .doAnswer(new Answer() {
-          @Override
-          public Boolean answer(InvocationOnMock invocation) throws Throwable {
-            boolean result = (boolean) invocation.callRealMethod();
-            assertFalse("syncWithRemoteRegistry() should return false", result);
-            return false;
-          }
+        .doAnswer(invocation -> {
+          boolean result = (boolean) invocation.callRealMethod();
+          assertFalse("syncWithRemoteRegistry() should return false", result);
+          return false;
         })
         .when(functionImplementationRegistry).syncWithRemoteRegistry(anyLong());
 
@@ -929,6 +901,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
     try {
       test("select unknown_lower('A') from (values(1))");
+      fail();
     } catch (UserRemoteException e){
       assertThat(e.getMessage(), containsString("No match found for function signature unknown_lower(<CHARACTER>)"));
     }
@@ -939,12 +912,18 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     assertEquals("Sync function registry version should match", 1L, localFunctionRegistry.getVersion());
   }
 
+  private static String buildJars(String jarName, String includeFiles, String includeResources) {
+    return jarBuilder.build(jarName, buildDirectory.getAbsolutePath(), includeFiles, includeResources);
+  }
+
   private void copyDefaultJarsToStagingArea() throws IOException {
-    copyJarsToStagingArea(jars, default_binary_name, default_source_name);
+    copyJarsToStagingArea(jarsDir.toPath(), defaultBinaryJar, defaultSourceJar);
   }
 
-  private void copyJarsToStagingArea(String binaryName, String sourceName) throws IOException  {
-    copyJarsToStagingArea(jars, binaryName, sourceName);
+  private String buildAndCopyJarsToStagingArea(String jarName, String includeFiles, String includeResources) throws IOException {
+    String binaryJar = buildJars(jarName, includeFiles, includeResources);
+    copyJarsToStagingArea(buildDirectory.toPath(), binaryJar, JarUtil.getSourceName(binaryJar));
+    return binaryJar;
   }
 
   private void copyJarsToStagingArea(Path src, String binaryName, String sourceName) throws IOException {
diff --git a/exec/java-exec/src/test/resources/drill-udf/pom.xml b/exec/java-exec/src/test/resources/drill-udf/pom.xml
new file mode 100644
index 0000000..7361845
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/pom.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>org.apache.drill.udf</groupId>
+  <artifactId>drill-udf</artifactId>
+  <version>1.0</version>
+
+  <properties>
+    <jar.finalName>${project.name}</jar.finalName>
+    <custom.buildDirectory>${project.basedir}/target</custom.buildDirectory>
+    <drill.version>1.13.0</drill.version>
+    <include.files>**/*.java</include.files>
+    <include.resources>**/*.conf</include.resources>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.drill.exec</groupId>
+      <artifactId>drill-java-exec</artifactId>
+      <version>${drill.version}</version>
+      <scope>provided</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <directory>${custom.buildDirectory}</directory>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <includes>
+          <include>${include.resources}</include>
+        </includes>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>3.1</version>
+        <configuration>
+          <includes>
+            <include>${include.files}</include>
+          </includes>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-source-plugin</artifactId>
+        <version>2.4</version>
+        <configuration>
+          <finalName>${jar.finalName}</finalName>
+          <includes>
+            <include>${include.files}</include>
+          </includes>
+        </configuration>
+        <executions>
+          <execution>
+            <id>attach-sources</id>
+            <phase>package</phase>
+            <goals>
+              <goal>jar-no-fork</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+
+</project>
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomAbsFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomAbsFunction.java
new file mode 100644
index 0000000..9bdcffb
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomAbsFunction.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+    name="abs",
+    scope= FunctionTemplate.FunctionScope.SIMPLE,
+    nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomAbsFunction implements DrillSimpleFunc {
+
+  @Param
+  VarCharHolder input1;
+
+  @Param
+  VarCharHolder input2;
+
+  @Output
+  VarCharHolder out;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+
+  }
+
+  public void eval() {
+    String inputString1 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input1.start, input1.end, input1.buffer);
+    String inputString2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input2.start, input2.end, input2.buffer);
+    String outputValue = String.format("ABS was overloaded. Input: %s, %s", inputString1, inputString2);
+
+    out.buffer = buffer;
+    out.start = 0;
+    out.end = outputValue.getBytes().length;
+    buffer.setBytes(0, outputValue.getBytes());
+  }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLogFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLogFunction.java
new file mode 100644
index 0000000..fa49a35
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLogFunction.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+import javax.inject.Inject;
+
+@FunctionTemplate(
+    name="log",
+    scope= FunctionTemplate.FunctionScope.SIMPLE,
+    nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomLogFunction implements DrillSimpleFunc {
+
+  @Param
+  VarCharHolder input;
+
+  @Output
+  VarCharHolder out;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+
+  }
+
+  public void eval() {
+    String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+    String outputValue = "LOG was overloaded. Input: " + inputString;
+
+    out.buffer = buffer;
+    out.start = 0;
+    out.end = outputValue.getBytes().length;
+    buffer.setBytes(0, outputValue.getBytes());
+  }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerDummyFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerDummyFunction.java
new file mode 100644
index 0000000..1e401d1
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerDummyFunction.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+public class CustomLowerDummyFunction implements DrillSimpleFunc {
+
+  @Param
+  VarCharHolder input;
+
+  @Output
+  VarCharHolder output;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+  }
+
+  public void eval() {
+
+    // get value
+    String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+    // convert to lower case
+    String outputValue = inputString.toLowerCase();
+
+    // put the output value into output buffer
+    output.buffer = buffer;
+    output.start = 0;
+    output.end = outputValue.getBytes().length;
+    buffer.setBytes(0, outputValue.getBytes());
+
+  }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunction.java
new file mode 100644
index 0000000..f868be3
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunction.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+    name="custom_lower",
+    scope = FunctionTemplate.FunctionScope.SIMPLE,
+    nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomLowerFunction implements DrillSimpleFunc {
+
+  @Param
+  VarCharHolder input;
+
+  @Output
+  VarCharHolder output;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+  }
+
+  public void eval() {
+
+    // get value
+    String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+    // convert to lower case
+    String outputValue = inputString.toLowerCase();
+
+    // put the output value into output buffer
+    output.buffer = buffer;
+    output.start = 0;
+    output.end = outputValue.getBytes().length;
+    buffer.setBytes(0, outputValue.getBytes());
+
+  }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunctionV2.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunctionV2.java
new file mode 100644
index 0000000..e564d7f
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomLowerFunctionV2.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+    name="custom_lower",
+    scope = FunctionTemplate.FunctionScope.SIMPLE,
+    nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomLowerFunctionV2 implements DrillSimpleFunc {
+
+  @Param
+  VarCharHolder input;
+
+  @Output
+  VarCharHolder output;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+  }
+
+  public void eval() {
+
+    // get value
+    String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+    // convert to lower case
+    String outputValue = inputString.toLowerCase() + "_v2";
+
+    // put the output value into output buffer
+    output.buffer = buffer;
+    output.start = 0;
+    output.end = outputValue.getBytes().length;
+    buffer.setBytes(0, outputValue.getBytes());
+
+  }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomUpperFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomUpperFunction.java
new file mode 100644
index 0000000..9ac473b
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/CustomUpperFunction.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+    name="custom_upper",
+    scope = FunctionTemplate.FunctionScope.SIMPLE,
+    nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class CustomUpperFunction implements DrillSimpleFunc {
+
+  @Param
+  VarCharHolder input;
+
+  @Output
+  VarCharHolder output;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+  }
+
+  public void eval() {
+
+    // get value
+    String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+    // convert to upper case
+    String outputValue = inputString.toUpperCase();
+
+    // put the output value into output buffer
+    output.buffer = buffer;
+    output.start = 0;
+    output.end = outputValue.getBytes().length;
+    buffer.setBytes(0, outputValue.getBytes());
+
+  }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/LowerFunction.java b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/LowerFunction.java
new file mode 100644
index 0000000..0d5d149
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/java/org/apache/drill/udf/dynamic/LowerFunction.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.udf.dynamic;
+
+import io.netty.buffer.DrillBuf;
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import javax.inject.Inject;
+
+@FunctionTemplate(
+    name="lower",
+    scope = FunctionTemplate.FunctionScope.SIMPLE,
+    nulls = FunctionTemplate.NullHandling.NULL_IF_NULL
+)
+public class LowerFunction implements DrillSimpleFunc {
+
+  @Param
+  VarCharHolder input;
+
+  @Output
+  VarCharHolder output;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+  }
+
+  public void eval() {
+
+    // get value
+    String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end, input.buffer);
+
+    // convert to lower case
+    String outputValue = inputString.toLowerCase();
+
+    // put the output value into output buffer
+    output.buffer = buffer;
+    output.start = 0;
+    output.end = outputValue.getBytes().length;
+    buffer.setBytes(0, outputValue.getBytes());
+
+  }
+}
+
diff --git a/exec/java-exec/src/test/resources/drill-udf/src/main/resources/drill-module.conf b/exec/java-exec/src/test/resources/drill-udf/src/main/resources/drill-module.conf
new file mode 100644
index 0000000..0b2948a
--- /dev/null
+++ b/exec/java-exec/src/test/resources/drill-udf/src/main/resources/drill-module.conf
@@ -0,0 +1 @@
+drill.classpath.scanning.packages += "org.apache.drill.udf.dynamic"
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-1.0-sources.jar
deleted file mode 100644
index b5965c9..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF-1.0-sources.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-1.0.jar
deleted file mode 100644
index 7cd2eeb..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF-1.0.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-2.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-2.0-sources.jar
deleted file mode 100644
index 1c8308c..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF-2.0-sources.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-2.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-2.0.jar
deleted file mode 100644
index 3522c1e..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF-2.0.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0-sources.jar
deleted file mode 100644
index f6b250e..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0-sources.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0.jar
deleted file mode 100644
index 4b5ef8b..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF-overloading-1.0.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0-sources.jar
deleted file mode 100644
index fa449e2..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0-sources.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0.jar
deleted file mode 100644
index 8945fe7..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF_Copy-1.0.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0-sources.jar
deleted file mode 100644
index b19ade6..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0-sources.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0.jar
deleted file mode 100644
index 56a649c..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF_DupFunc-1.0.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0-sources.jar
deleted file mode 100644
index 2a82dc9..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0-sources.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0.jar
deleted file mode 100644
index 11ed28b..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF_Empty-1.0.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0-sources.jar
deleted file mode 100644
index dbc97dd..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0-sources.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0.jar b/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0.jar
deleted file mode 100644
index cba65da..0000000
Binary files a/exec/java-exec/src/test/resources/jars/DrillUDF_NoMarkerFile-1.0.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0-sources.jar b/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0-sources.jar
deleted file mode 100644
index 583b1c4..0000000
Binary files a/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0-sources.jar and /dev/null differ
diff --git a/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0.jar b/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0.jar
deleted file mode 100644
index 42df4a4..0000000
Binary files a/exec/java-exec/src/test/resources/jars/v2/DrillUDF-1.0.jar and /dev/null differ

-- 
To stop receiving notification emails like this one, please contact
amansinha@apache.org.