You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@drill.apache.org by ti...@apache.org on 2018/08/01 18:36:11 UTC

[drill] branch master updated (b774413 -> efd6d29)

This is an automated email from the ASF dual-hosted git repository.

timothyfarkas pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from b774413  DRILL-6650: Remove stray semicolon in imports for PrintingResultsListener.
     new 0903a1e  DRILL-6634: Add udf module under contrib directory and move some udfs into it
     new ee84164  DRILL-6631: Streaming agg causes queries with Lateral and Unnest to return incorrect results.
     new efd6d29  DRILL-5796 : implement ROWS_MATCH enum to keep inside rowgroup the filter result information, used to prune the filter if all rows match.

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 contrib/gis/pom.xml                                | 134 -----
 contrib/gis/src/main/resources/drill-module.conf   |   0
 .../drill/exec/expr/fn/impl/gis/GISTestSuite.java  |  30 --
 contrib/pom.xml                                    |   2 +-
 contrib/udfs/pom.xml                               | 136 +++++
 .../sample-data/CA-cities-with-nulls.csv           |   0
 contrib/{gis => udfs}/sample-data/CA-cities.csv    |   0
 contrib/{gis => udfs}/sample-data/polygons.tsv     |   0
 .../com/esri/core/geometry/VertexGeomAccessor.java |   0
 .../apache/drill/exec/udfs}/CryptoFunctions.java   |  16 +-
 .../apache/drill/exec/udfs}/NetworkFunctions.java  |  55 +-
 .../apache/drill/exec/udfs}/PhoneticFunctions.java |  16 +-
 .../drill/exec/udfs}/StringDistanceFunctions.java  |  36 +-
 .../apache/drill/exec/udfs}/gis/STAsGeoJSON.java   |   7 +-
 .../org/apache/drill/exec/udfs}/gis/STAsJson.java  |   7 +-
 .../org/apache/drill/exec/udfs}/gis/STAsText.java  |   7 +-
 .../org/apache/drill/exec/udfs}/gis/STBuffer.java  |   9 +-
 .../apache/drill/exec/udfs}/gis/STContains.java    |  13 +-
 .../org/apache/drill/exec/udfs}/gis/STCrosses.java |  13 +-
 .../org/apache/drill/exec/udfs}/gis/STDWithin.java |  11 +-
 .../apache/drill/exec/udfs}/gis/STDifference.java  |   9 +-
 .../apache/drill/exec/udfs}/gis/STDisjoint.java    |  13 +-
 .../apache/drill/exec/udfs}/gis/STDistance.java    |   9 +-
 .../apache/drill/exec/udfs}/gis/STEnvelope.java    |   9 +-
 .../org/apache/drill/exec/udfs}/gis/STEquals.java  |  13 +-
 .../drill/exec/udfs}/gis/STGeomFromText.java       |   7 +-
 .../drill/exec/udfs}/gis/STGeomFromTextSrid.java   |   7 +-
 .../apache/drill/exec/udfs}/gis/STIntersects.java  |  13 +-
 .../apache/drill/exec/udfs}/gis/STOverlaps.java    |  13 +-
 .../apache/drill/exec/udfs}/gis/STPointFunc.java   |   7 +-
 .../org/apache/drill/exec/udfs}/gis/STRelate.java  |  13 +-
 .../org/apache/drill/exec/udfs}/gis/STTouches.java |  13 +-
 .../apache/drill/exec/udfs}/gis/STTransform.java   |  18 +-
 .../org/apache/drill/exec/udfs}/gis/STUnion.java   |   9 +-
 .../drill/exec/udfs}/gis/STUnionAggregate.java     |  44 +-
 .../org/apache/drill/exec/udfs}/gis/STWithin.java  |  11 +-
 .../org/apache/drill/exec/udfs}/gis/STXFunc.java   |   9 +-
 .../org/apache/drill/exec/udfs}/gis/STXMax.java    |  10 +-
 .../org/apache/drill/exec/udfs}/gis/STXMin.java    |  10 +-
 .../org/apache/drill/exec/udfs}/gis/STYFunc.java   |   9 +-
 .../org/apache/drill/exec/udfs}/gis/STYMax.java    |  10 +-
 .../org/apache/drill/exec/udfs}/gis/STYMin.java    |  10 +-
 contrib/udfs/src/main/resources/drill-module.conf  |  20 +
 .../drill/exec/udfs}/TestCryptoFunctions.java      |   2 +-
 .../drill/exec/udfs}/TestNetworkFunctions.java     |   6 +-
 .../drill/exec/udfs}/TestPhoneticFunctions.java    |   5 +-
 .../exec/udfs}/TestStringDistanceFunctions.java    |   2 +-
 .../exec/udfs}/gis/TestGeometryFunctions.java      |  14 +-
 distribution/pom.xml                               |   8 +-
 distribution/src/assemble/bin.xml                  |   2 +-
 exec/java-exec/pom.xml                             |  21 -
 .../exec/expr/stat/ParquetBooleanPredicate.java    |  48 +-
 .../exec/expr/stat/ParquetComparisonPredicate.java |  78 +--
 .../exec/expr/stat/ParquetFilterPredicate.java     |  13 +-
 .../drill/exec/expr/stat/ParquetIsPredicate.java   | 125 +++--
 .../drill/exec/expr/stat/RangeExprEvaluator.java   |  33 +-
 .../physical/impl/aggregate/StreamingAggBatch.java |  39 +-
 .../impl/aggregate/StreamingAggTemplate.java       |   2 +-
 .../store/parquet/AbstractParquetGroupScan.java    |   8 +-
 .../exec/store/parquet/ParquetPushDownFilter.java  |  18 +-
 .../store/parquet/ParquetRGFilterEvaluator.java    |  52 +-
 .../drill/exec/store/parquet/RowGroupInfo.java     |   5 +
 .../parquet/stat/ParquetFooterStatCollector.java   |   2 +-
 .../parquet/stat/ParquetMetaStatCollector.java     |   2 +-
 .../impl/agg/TestStreamingAggEmitOutcome.java      | 553 +++++++++++++++++++++
 .../store/parquet/TestParquetFilterPushDown.java   | 335 +++++++++----
 .../test/resources/parquet/multirowgroup2.parquet  | Bin 0 -> 598 bytes
 .../parquet/multirowgroupwithNulls.parquet         | Bin 0 -> 2063 bytes
 .../resources/parquetFilterPush/tfTbl/ff1.parquet  | Bin 0 -> 251 bytes
 .../resources/parquetFilterPush/tfTbl/ft0.parquet  | Bin 0 -> 251 bytes
 .../resources/parquetFilterPush/tfTbl/tt1.parquet  | Bin 0 -> 251 bytes
 exec/jdbc-all/pom.xml                              |   8 -
 72 files changed, 1414 insertions(+), 745 deletions(-)
 delete mode 100644 contrib/gis/pom.xml
 delete mode 100644 contrib/gis/src/main/resources/drill-module.conf
 delete mode 100644 contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/GISTestSuite.java
 create mode 100644 contrib/udfs/pom.xml
 rename contrib/{gis => udfs}/sample-data/CA-cities-with-nulls.csv (100%)
 rename contrib/{gis => udfs}/sample-data/CA-cities.csv (100%)
 rename contrib/{gis => udfs}/sample-data/polygons.tsv (100%)
 rename contrib/{gis => udfs}/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java (100%)
 rename {exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl => contrib/udfs/src/main/java/org/apache/drill/exec/udfs}/CryptoFunctions.java (98%)
 rename {exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl => contrib/udfs/src/main/java/org/apache/drill/exec/udfs}/NetworkFunctions.java (96%)
 rename {exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl => contrib/udfs/src/main/java/org/apache/drill/exec/udfs}/PhoneticFunctions.java (98%)
 rename {exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl => contrib/udfs/src/main/java/org/apache/drill/exec/udfs}/StringDistanceFunctions.java (93%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STAsGeoJSON.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STAsJson.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STAsText.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STBuffer.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STContains.java (94%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STCrosses.java (94%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STDWithin.java (93%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STDifference.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STDisjoint.java (94%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STDistance.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STEnvelope.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STEquals.java (94%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STGeomFromText.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STGeomFromTextSrid.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STIntersects.java (94%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STOverlaps.java (94%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STPointFunc.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STRelate.java (95%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STTouches.java (94%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STTransform.java (92%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STUnion.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STUnionAggregate.java (77%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STWithin.java (94%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STXFunc.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STXMax.java (95%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STXMin.java (95%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STYFunc.java (97%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STYMax.java (95%)
 rename contrib/{gis/src/main/java/org/apache/drill/exec/expr/fn/impl => udfs/src/main/java/org/apache/drill/exec/udfs}/gis/STYMin.java (95%)
 create mode 100644 contrib/udfs/src/main/resources/drill-module.conf
 rename {exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl => contrib/udfs/src/test/java/org/apache/drill/exec/udfs}/TestCryptoFunctions.java (98%)
 rename {exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl => contrib/udfs/src/test/java/org/apache/drill/exec/udfs}/TestNetworkFunctions.java (96%)
 rename {exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl => contrib/udfs/src/test/java/org/apache/drill/exec/udfs}/TestPhoneticFunctions.java (96%)
 rename {exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl => contrib/udfs/src/test/java/org/apache/drill/exec/udfs}/TestStringDistanceFunctions.java (98%)
 rename contrib/{gis/src/test/java/org/apache/drill/exec/expr/fn/impl => udfs/src/test/java/org/apache/drill/exec/udfs}/gis/TestGeometryFunctions.java (95%)
 create mode 100644 exec/java-exec/src/test/resources/parquet/multirowgroup2.parquet
 create mode 100644 exec/java-exec/src/test/resources/parquet/multirowgroupwithNulls.parquet
 create mode 100644 exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/ff1.parquet
 create mode 100644 exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/ft0.parquet
 create mode 100644 exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/tt1.parquet


[drill] 01/03: DRILL-6634: Add udf module under contrib directory and move some udfs into it

Posted by ti...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

timothyfarkas pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 0903a1edb5531d3d28104191b35b57db3a99d5da
Author: Arina Ielchiieva <ar...@gmail.com>
AuthorDate: Wed Jul 25 20:28:45 2018 +0300

    DRILL-6634: Add udf module under contrib directory and move some udfs into it
    
    1. Created new contrib/udf module.
    2. Moved distance, phonetic, networking, crypto functions from java-exec to contrib/udf module.
    3. Moved functions from gis module to contrib/udf module. Removed gis module.
    4. Removed unnecessary dependencies from java-exec module.
    5. Minor refactoring of moved functions code.
    
    closes #1403
---
 contrib/gis/pom.xml                                | 134 --------------------
 contrib/gis/src/main/resources/drill-module.conf   |   0
 .../drill/exec/expr/fn/impl/gis/GISTestSuite.java  |  30 -----
 contrib/pom.xml                                    |   2 +-
 contrib/udfs/pom.xml                               | 136 +++++++++++++++++++++
 .../sample-data/CA-cities-with-nulls.csv           |   0
 contrib/{gis => udfs}/sample-data/CA-cities.csv    |   0
 contrib/{gis => udfs}/sample-data/polygons.tsv     |   0
 .../com/esri/core/geometry/VertexGeomAccessor.java |   0
 .../apache/drill/exec/udfs}/CryptoFunctions.java   |  16 +--
 .../apache/drill/exec/udfs}/NetworkFunctions.java  |  55 +--------
 .../apache/drill/exec/udfs}/PhoneticFunctions.java |  16 +--
 .../drill/exec/udfs}/StringDistanceFunctions.java  |  36 ++----
 .../apache/drill/exec/udfs}/gis/STAsGeoJSON.java   |   7 +-
 .../org/apache/drill/exec/udfs}/gis/STAsJson.java  |   7 +-
 .../org/apache/drill/exec/udfs}/gis/STAsText.java  |   7 +-
 .../org/apache/drill/exec/udfs}/gis/STBuffer.java  |   9 +-
 .../apache/drill/exec/udfs}/gis/STContains.java    |  13 +-
 .../org/apache/drill/exec/udfs}/gis/STCrosses.java |  13 +-
 .../org/apache/drill/exec/udfs}/gis/STDWithin.java |  11 +-
 .../apache/drill/exec/udfs}/gis/STDifference.java  |   9 +-
 .../apache/drill/exec/udfs}/gis/STDisjoint.java    |  13 +-
 .../apache/drill/exec/udfs}/gis/STDistance.java    |   9 +-
 .../apache/drill/exec/udfs}/gis/STEnvelope.java    |   9 +-
 .../org/apache/drill/exec/udfs}/gis/STEquals.java  |  13 +-
 .../drill/exec/udfs}/gis/STGeomFromText.java       |   7 +-
 .../drill/exec/udfs}/gis/STGeomFromTextSrid.java   |   7 +-
 .../apache/drill/exec/udfs}/gis/STIntersects.java  |  13 +-
 .../apache/drill/exec/udfs}/gis/STOverlaps.java    |  13 +-
 .../apache/drill/exec/udfs}/gis/STPointFunc.java   |   7 +-
 .../org/apache/drill/exec/udfs}/gis/STRelate.java  |  13 +-
 .../org/apache/drill/exec/udfs}/gis/STTouches.java |  13 +-
 .../apache/drill/exec/udfs}/gis/STTransform.java   |  18 +--
 .../org/apache/drill/exec/udfs}/gis/STUnion.java   |   9 +-
 .../drill/exec/udfs}/gis/STUnionAggregate.java     |  44 ++++---
 .../org/apache/drill/exec/udfs}/gis/STWithin.java  |  11 +-
 .../org/apache/drill/exec/udfs}/gis/STXFunc.java   |   9 +-
 .../org/apache/drill/exec/udfs}/gis/STXMax.java    |  10 +-
 .../org/apache/drill/exec/udfs}/gis/STXMin.java    |  10 +-
 .../org/apache/drill/exec/udfs}/gis/STYFunc.java   |   9 +-
 .../org/apache/drill/exec/udfs}/gis/STYMax.java    |  10 +-
 .../org/apache/drill/exec/udfs}/gis/STYMin.java    |  10 +-
 contrib/udfs/src/main/resources/drill-module.conf  |  20 +++
 .../drill/exec/udfs}/TestCryptoFunctions.java      |   2 +-
 .../drill/exec/udfs}/TestNetworkFunctions.java     |   6 +-
 .../drill/exec/udfs}/TestPhoneticFunctions.java    |   5 +-
 .../exec/udfs}/TestStringDistanceFunctions.java    |   2 +-
 .../exec/udfs}/gis/TestGeometryFunctions.java      |  14 ++-
 distribution/pom.xml                               |   8 +-
 distribution/src/assemble/bin.xml                  |   2 +-
 exec/java-exec/pom.xml                             |  21 ----
 exec/jdbc-all/pom.xml                              |   8 --
 52 files changed, 331 insertions(+), 515 deletions(-)

diff --git a/contrib/gis/pom.xml b/contrib/gis/pom.xml
deleted file mode 100644
index 8a37b6c..0000000
--- a/contrib/gis/pom.xml
+++ /dev/null
@@ -1,134 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS,
-    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and
-    limitations under the License.
-
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<parent>
-		<artifactId>drill-contrib-parent</artifactId>
-		<groupId>org.apache.drill.contrib</groupId>
-		<version>1.15.0-SNAPSHOT</version>
-	</parent>
-
-	<artifactId>drill-gis</artifactId>
-
-	<name>contrib/drill-gis-plugin</name>
-
-	<properties>
-		<gis.TestSuite>**/GISTestSuite.class</gis.TestSuite>
-	</properties>
-
-	<dependencies>
-		<dependency>
-			<groupId>org.apache.drill.exec</groupId>
-			<artifactId>drill-java-exec</artifactId>
-			<version>${project.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>com.esri.geometry</groupId>
-			<artifactId>esri-geometry-api</artifactId>
-			<version>2.0.0</version>
-		</dependency>
-		<dependency>
-		    <groupId>org.osgeo</groupId>
-		    <artifactId>proj4j</artifactId>
-		    <version>0.1.0</version>
-		</dependency>
-		<!-- Test dependencies -->
-		<dependency>
-			<groupId>org.apache.drill.exec</groupId>
-			<artifactId>drill-java-exec</artifactId>
-			<classifier>tests</classifier>
-			<version>${project.version}</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.drill</groupId>
-			<artifactId>drill-common</artifactId>
-			<classifier>tests</classifier>
-			<version>${project.version}</version>
-			<scope>test</scope>
-		</dependency>
-	</dependencies>
-	<build>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-surefire-plugin</artifactId>
-				<configuration>
-					<includes>
-						<include>${gis.TestSuite}</include>
-					</includes>
-					<systemProperties>
-						<property>
-							<name>logback.log.dir</name>
-							<value>${project.build.directory}/surefire-reports</value>
-						</property>
-					</systemProperties>
-				</configuration>
-			</plugin>
-			<plugin>
-				<artifactId>maven-resources-plugin</artifactId>
-				<executions>
-					<execution>
-						<id>copy-java-sources</id>
-						<phase>process-sources</phase>
-						<goals>
-							<goal>copy-resources</goal>
-						</goals>
-						<configuration>
-							<outputDirectory>${basedir}/target/classes/org/apache/drill/exec/expr/fn/impl</outputDirectory>
-							<resources>
-								<resource>
-									<directory>src/main/java/org/apache/drill/exec/expr/fn/impl</directory>
-									<filtering>true</filtering>
-								</resource>
-								<resource>
-									<directory>src/test/java</directory>
-									<filtering>true</filtering>
-								</resource>
-								<resource>
-									<directory>target/generated-sources</directory>
-									<!-- <include>*/org</include> -->
-									<filtering>true</filtering>
-								</resource>
-							</resources>
-						</configuration>
-					</execution>
-					<execution>
-						<id>copy-gis-sample-data</id>
-						<phase>process-sources</phase>
-						<goals>
-							<goal>copy-resources</goal>
-						</goals>
-						<configuration>
-							<outputDirectory>${project.build.directory}/classes/sample-data</outputDirectory>
-							<resources>
-								<resource>
-									<directory>sample-data</directory>
-									<filtering>false</filtering>
-								</resource>
-							</resources>
-						</configuration>
-					</execution>
-				</executions>
-			</plugin>
-		</plugins>
-	</build>
-</project>
diff --git a/contrib/gis/src/main/resources/drill-module.conf b/contrib/gis/src/main/resources/drill-module.conf
deleted file mode 100644
index e69de29..0000000
diff --git a/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/GISTestSuite.java b/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/GISTestSuite.java
deleted file mode 100644
index 07521c3..0000000
--- a/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/GISTestSuite.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import org.junit.runner.RunWith;
-import org.junit.runners.Suite;
-import org.junit.runners.Suite.SuiteClasses;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@RunWith(Suite.class)
-@SuiteClasses({ TestGeometryFunctions.class })
-public class GISTestSuite {
-  private static final Logger logger = LoggerFactory.getLogger(GISTestSuite.class);
-}
diff --git a/contrib/pom.xml b/contrib/pom.xml
index 8b57664..a96aa42 100644
--- a/contrib/pom.xml
+++ b/contrib/pom.xml
@@ -45,7 +45,7 @@
     <module>storage-opentsdb</module>
     <module>sqlline</module>
     <module>data</module>
-    <module>gis</module>
+    <module>udfs</module>
   </modules>
 
   <profiles>
diff --git a/contrib/udfs/pom.xml b/contrib/udfs/pom.xml
new file mode 100644
index 0000000..44987fc
--- /dev/null
+++ b/contrib/udfs/pom.xml
@@ -0,0 +1,136 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+<parent>
+    <artifactId>drill-contrib-parent</artifactId>
+    <groupId>org.apache.drill.contrib</groupId>
+    <version>1.15.0-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>drill-udfs</artifactId>
+  <name>contrib/drill-udfs</name>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.drill.exec</groupId>
+      <artifactId>drill-java-exec</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>commons-net</groupId>
+      <artifactId>commons-net</artifactId>
+      <version>3.6</version>
+    </dependency>
+
+    <dependency>
+      <groupId>commons-validator</groupId>
+      <artifactId>commons-validator</artifactId>
+      <version>1.6</version>
+      <exclusions>
+        <exclusion>
+          <groupId>commons-logging</groupId>
+          <artifactId>commons-logging</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-text</artifactId>
+      <version>1.4</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.esri.geometry</groupId>
+      <artifactId>esri-geometry-api</artifactId>
+      <version>2.2.0</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.osgeo</groupId>
+      <artifactId>proj4j</artifactId>
+      <version>0.1.0</version>
+    </dependency>
+
+    <!-- Test dependencies -->
+    <dependency>
+      <groupId>org.apache.drill.exec</groupId>
+      <artifactId>drill-java-exec</artifactId>
+      <classifier>tests</classifier>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.drill</groupId>
+      <artifactId>drill-common</artifactId>
+      <classifier>tests</classifier>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-resources-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-java-sources</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${basedir}/target/classes/org/apache/drill/exec/udfs</outputDirectory>
+              <resources>
+                <resource>
+                  <directory>src/main/java/org/apache/drill/exec/udfs</directory>
+                  <filtering>true</filtering>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+          <execution>
+            <id>copy-udf-sample-data</id>
+            <phase>process-sources</phase>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.directory}/classes/sample-data</outputDirectory>
+              <resources>
+                <resource>
+                  <directory>sample-data</directory>
+                  <filtering>false</filtering>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>
\ No newline at end of file
diff --git a/contrib/gis/sample-data/CA-cities-with-nulls.csv b/contrib/udfs/sample-data/CA-cities-with-nulls.csv
similarity index 100%
rename from contrib/gis/sample-data/CA-cities-with-nulls.csv
rename to contrib/udfs/sample-data/CA-cities-with-nulls.csv
diff --git a/contrib/gis/sample-data/CA-cities.csv b/contrib/udfs/sample-data/CA-cities.csv
similarity index 100%
rename from contrib/gis/sample-data/CA-cities.csv
rename to contrib/udfs/sample-data/CA-cities.csv
diff --git a/contrib/gis/sample-data/polygons.tsv b/contrib/udfs/sample-data/polygons.tsv
similarity index 100%
rename from contrib/gis/sample-data/polygons.tsv
rename to contrib/udfs/sample-data/polygons.tsv
diff --git a/contrib/gis/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java b/contrib/udfs/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java
similarity index 100%
rename from contrib/gis/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java
rename to contrib/udfs/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CryptoFunctions.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/CryptoFunctions.java
similarity index 98%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CryptoFunctions.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/CryptoFunctions.java
index 68c4753..bc6f23c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/CryptoFunctions.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/CryptoFunctions.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl;
+package org.apache.drill.exec.udfs;
 
 import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
@@ -29,16 +29,11 @@ import javax.crypto.Cipher;
 import javax.inject.Inject;
 
 public class CryptoFunctions {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CryptoFunctions.class);
-
-  private CryptoFunctions() {
-  }
 
   /**
    * This class returns the md2 digest of a given input string.
    *  Usage is SELECT md2( <input string> ) FROM ...
    */
-
   @FunctionTemplate(name = "md2", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class MD2Function implements DrillSimpleFunc {
 
@@ -74,7 +69,6 @@ public class CryptoFunctions {
    *  Usage is shown below:
    *  select md5( 'testing' ) from (VALUES(1));
    */
-
   @FunctionTemplate(name = "md5", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class MD5Function implements DrillSimpleFunc {
 
@@ -112,7 +106,6 @@ public class CryptoFunctions {
    *
    * > select sha1( 'testing' ) from (VALUES(1));
    */
-
   @FunctionTemplate(names = {"sha", "sha1"}, scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class SHA1Function implements DrillSimpleFunc {
 
@@ -150,7 +143,6 @@ public class CryptoFunctions {
    * or NULL if the argument was NULL. Note that sha2() and sha256() are aliases for the same function.
    * > select sha2( 'testing' ) from (VALUES(1));
    */
-
   @FunctionTemplate(names = {"sha256", "sha2"}, scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class SHA256Function implements DrillSimpleFunc {
 
@@ -189,7 +181,6 @@ public class CryptoFunctions {
    *  Usage is shown below:
    *  select sha384( 'testing' ) from (VALUES(1));
    */
-
   @FunctionTemplate(name = "sha384", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class SHA384Function implements DrillSimpleFunc {
 
@@ -227,8 +218,6 @@ public class CryptoFunctions {
    *  Usage is shown below:
    *  select sha512( 'testing' ) from (VALUES(1));
    */
-
-
   @FunctionTemplate(name = "sha512", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class SHA512Function implements DrillSimpleFunc {
 
@@ -267,8 +256,6 @@ public class CryptoFunctions {
    * binary string containing the encrypted output.
    * Usage:  SELECT aes_encrypt( 'encrypted_text', 'my_secret_key' ) AS aes FROM (VALUES(1));
    */
-
-
   @FunctionTemplate(name = "aes_encrypt", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class AESEncryptFunction implements DrillSimpleFunc {
 
@@ -329,7 +316,6 @@ public class CryptoFunctions {
    *  If either function argument is NULL, the function returns NULL.
    *  Usage:  SELECT aes_decrypt( <encrypted_text>, <key> ) FROM ...
    */
-
   @FunctionTemplate(name = "aes_decrypt", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class AESDecryptFunction implements DrillSimpleFunc {
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/NetworkFunctions.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/NetworkFunctions.java
similarity index 96%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/NetworkFunctions.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/NetworkFunctions.java
index 448e8b6..cf20527 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/NetworkFunctions.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/NetworkFunctions.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl;
+package org.apache.drill.exec.udfs;
 
 import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
@@ -29,10 +29,6 @@ import org.apache.drill.exec.expr.holders.VarCharHolder;
 import javax.inject.Inject;
 
 public class NetworkFunctions {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(NetworkFunctions.class);
-
-  private NetworkFunctions() {
-  }
 
   /**
    * This function takes two arguments, an input IPv4 and a CIDR, and returns true if the IP is in the given CIDR block
@@ -49,9 +45,6 @@ public class NetworkFunctions {
     @Output
     BitHolder out;
 
-    @Inject
-    DrillBuf buffer;
-
     public void setup() {
     }
 
@@ -85,9 +78,6 @@ public class NetworkFunctions {
     @Output
     BigIntHolder out;
 
-    @Inject
-    DrillBuf buffer;
-
     public void setup() {
     }
 
@@ -96,8 +86,7 @@ public class NetworkFunctions {
       String cidrString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(inputCIDR.start, inputCIDR.end, inputCIDR.buffer);
       org.apache.commons.net.util.SubnetUtils utils = new org.apache.commons.net.util.SubnetUtils(cidrString);
 
-      out.value = utils.getInfo().getAddressCount();
-
+      out.value = utils.getInfo().getAddressCountLong();
     }
 
   }
@@ -131,7 +120,6 @@ public class NetworkFunctions {
       out.start = 0;
       out.end = outputValue.getBytes().length;
       buffer.setBytes(0, outputValue.getBytes());
-
     }
 
   }
@@ -139,7 +127,6 @@ public class NetworkFunctions {
   /**
    * This function gets the netmask of the input CIDR block.
    */
-
   @FunctionTemplate(name = "netmask", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class NetmaskFunction implements DrillSimpleFunc {
 
@@ -166,7 +153,6 @@ public class NetworkFunctions {
       out.start = 0;
       out.end = outputValue.getBytes().length;
       buffer.setBytes(0, outputValue.getBytes());
-
     }
 
   }
@@ -200,7 +186,6 @@ public class NetworkFunctions {
       out.start = 0;
       out.end = outputValue.getBytes().length;
       buffer.setBytes(0, outputValue.getBytes());
-
     }
 
   }
@@ -234,7 +219,6 @@ public class NetworkFunctions {
       out.start = 0;
       out.end = outputValue.getBytes().length;
       buffer.setBytes(0, outputValue.getBytes());
-
     }
   }
 
@@ -264,13 +248,12 @@ public class NetworkFunctions {
       try {
         outputValue = java.net.URLEncoder.encode(url, "UTF-8");
       } catch (Exception e) {
-
+        // do nothing
       }
       outputString.buffer = buffer;
       outputString.start = 0;
       outputString.end = outputValue.getBytes().length;
       buffer.setBytes(0, outputValue.getBytes());
-
     }
   }
 
@@ -300,13 +283,12 @@ public class NetworkFunctions {
       try {
         outputValue = java.net.URLDecoder.decode(url, "UTF-8");
       } catch (Exception e) {
-
+        // do nothing
       }
       outputString.buffer = buffer;
       outputString.start = 0;
       outputString.end = outputValue.getBytes().length;
       buffer.setBytes(0, outputValue.getBytes());
-
     }
   }
 
@@ -314,7 +296,6 @@ public class NetworkFunctions {
   /**
    * This function converts a BigInt IPv4 into dotted decimal notation.  The opposite of inet_aton.
    */
-
   @FunctionTemplate(name = "inet_ntoa", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class InetNtoaFunction implements DrillSimpleFunc {
 
@@ -355,14 +336,11 @@ public class NetworkFunctions {
       out.end = outputValue.getBytes().length;
       buffer.setBytes(0, outputValue.getBytes());
     }
-
-
   }
 
   /**
    * This function returns true if a given IPv4 address is private, false if not.
    */
-
   @FunctionTemplate(name = "is_private_ip", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class IsPrivateIP implements DrillSimpleFunc {
 
@@ -372,10 +350,6 @@ public class NetworkFunctions {
     @Output
     BitHolder out;
 
-    @Inject
-    DrillBuf buffer;
-
-
     public void setup() {
     }
 
@@ -385,22 +359,19 @@ public class NetworkFunctions {
 
       String[] ipAddressInArray = ipString.split("\\.");
 
-      int result = 0;
-
       int[] octets = new int[3];
 
       for (int i = 0; i < 3; i++) {
         octets[i] = Integer.parseInt(ipAddressInArray[i]);
       }
 
+      int result = 0;
       if (octets[0] == 192 && octets[1] == 168) {
         result = 1;
       } else if (octets[0] == 172 && octets[1] >= 16 && octets[1] <= 31) {
         result = 1;
       } else if (octets[0] == 10) {
         result = 1;
-      } else {
-        result = 0;
       }
 
       out.value = result;
@@ -423,10 +394,6 @@ public class NetworkFunctions {
     @Output
     BigIntHolder out;
 
-    @Inject
-    DrillBuf buffer;
-
-
     public void setup() {
     }
 
@@ -462,10 +429,6 @@ public class NetworkFunctions {
     @Output
     BitHolder out;
 
-    @Inject
-    DrillBuf buffer;
-
-
     public void setup() {
     }
 
@@ -484,7 +447,6 @@ public class NetworkFunctions {
           out.value = 0;
         }
       }
-
     }
   }
 
@@ -500,9 +462,6 @@ public class NetworkFunctions {
     @Output
     BitHolder out;
 
-    @Inject
-    DrillBuf buffer;
-
     public void setup() {
     }
 
@@ -536,10 +495,6 @@ public class NetworkFunctions {
     @Output
     BitHolder out;
 
-    @Inject
-    DrillBuf buffer;
-
-
     public void setup() {
     }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/PhoneticFunctions.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/PhoneticFunctions.java
similarity index 98%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/PhoneticFunctions.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/PhoneticFunctions.java
index ee26bd3..55e465a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/PhoneticFunctions.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/PhoneticFunctions.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl;
+package org.apache.drill.exec.udfs;
 
 import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
@@ -27,10 +27,6 @@ import org.apache.drill.exec.expr.holders.VarCharHolder;
 import javax.inject.Inject;
 
 public class PhoneticFunctions {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PhoneticFunctions.class);
-
-  private PhoneticFunctions() {
-  }
 
   /**
    * The Caverphone function is a phonetic matching function.   This is an algorithm created by the Caversham Project at the University of Otago. It implements the Caverphone 1.0 algorithm.
@@ -38,7 +34,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage:  SELECT caverphone1( string ) FROM...
    */
-
   @FunctionTemplate(name = "caverphone1", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class Caverphone1Function implements DrillSimpleFunc {
 
@@ -74,7 +69,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage: SELECT caverphone2( string ) FROM...
    */
-
   @FunctionTemplate(name = "caverphone2", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class Caverphone2Function implements DrillSimpleFunc {
 
@@ -114,7 +108,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage:  SELECT cologne_phonetic( string ) FROM...
    */
-
   @FunctionTemplate(name = "cologne_phonetic", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class ColognePhoneticFunction implements DrillSimpleFunc {
 
@@ -159,7 +152,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage:  SELECT dm_soundex( string ) FROM...
    */
-
   @FunctionTemplate(name = "dm_soundex", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class DaitchMokotoffFunction implements DrillSimpleFunc {
 
@@ -194,7 +186,6 @@ public class PhoneticFunctions {
    * Match Rating Approach Phonetic Algorithm Developed by Western Airlines in 1977.
    * Usage:  SELECT match_rating_encoder( string ) FROM...
    */
-
   @FunctionTemplate(name = "match_rating_encoder", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class MatchRatingFunction implements DrillSimpleFunc {
 
@@ -231,7 +222,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage: SELECT nysiis(string) FROM...
    */
-
   @FunctionTemplate(name = "nysiis", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class NYSIISFunction implements DrillSimpleFunc {
 
@@ -266,7 +256,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage:  SELECT refined_soundex( string ) FROM...
    */
-
   @FunctionTemplate(name = "refined_soundex", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class RefinedSoundexFunction implements DrillSimpleFunc {
 
@@ -302,7 +291,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage:  SELECT soundex( string ) FROM...
    */
-
   @FunctionTemplate(name = "soundex", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class SoundexFunction implements DrillSimpleFunc {
 
@@ -338,7 +326,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage: SELECT metaphone( string ) FROM...
    */
-
   @FunctionTemplate(name = "metaphone", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class MetaphoneFunction implements DrillSimpleFunc {
 
@@ -375,7 +362,6 @@ public class PhoneticFunctions {
    * <p>
    * Usage: SELECT double_metaphone( string ) FROM...
    */
-
   @FunctionTemplate(name = "double_metaphone", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class DoubleMetaphoneFunction implements DrillSimpleFunc {
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringDistanceFunctions.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/StringDistanceFunctions.java
similarity index 93%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringDistanceFunctions.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/StringDistanceFunctions.java
index 0b02769..bf02758 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/StringDistanceFunctions.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/StringDistanceFunctions.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.drill.exec.expr.fn.impl;
+package org.apache.drill.exec.udfs;
 
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
@@ -27,16 +27,11 @@ import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarCharHolder;
 
 public class StringDistanceFunctions {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StringDistanceFunctions.class);
-
-  private StringDistanceFunctions() {
-  }
 
   /**
    * This function calculates the cosine distance between two strings.
    * Usage:  SELECT cosine_distance( string1, string2 ) AS cosine_distance FROM...
    */
-
   @FunctionTemplate(name = "cosine_distance", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class CosineDistanceFunction implements DrillSimpleFunc {
 
@@ -64,8 +59,7 @@ public class StringDistanceFunctions {
       String input2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput2.start, rawInput2.end, rawInput2.buffer);
 
 
-      double result = d.apply(input1, input2);
-      out.value = result;
+      out.value = d.apply(input1, input2);
     }
   }
 
@@ -80,7 +74,6 @@ public class StringDistanceFunctions {
    * <p>
    * Usage:  SELECT fuzzy_score( string1, string2 ) AS fuzzy_score FROM...
    */
-
   @FunctionTemplate(name = "fuzzy_score", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class FuzzyScoreFunction implements DrillSimpleFunc {
 
@@ -107,8 +100,7 @@ public class StringDistanceFunctions {
       String input1 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput1.start, rawInput1.end, rawInput1.buffer);
       String input2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput2.start, rawInput2.end, rawInput2.buffer);
 
-      double result = d.fuzzyScore(input1, input2);
-      out.value = result;
+      out.value = d.fuzzyScore(input1, input2);
     }
   }
 
@@ -121,8 +113,6 @@ public class StringDistanceFunctions {
    * <p>
    * Usage:  SELECT hamming_distance( string1, string2 ) FROM...
    */
-
-
   @FunctionTemplate(name = "hamming_distance", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class HammingDistanceFunction implements DrillSimpleFunc {
 
@@ -149,8 +139,7 @@ public class StringDistanceFunctions {
       String input1 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput1.start, rawInput1.end, rawInput1.buffer);
       String input2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput2.start, rawInput2.end, rawInput2.buffer);
 
-      double result = d.apply(input1, input2);
-      out.value = result;
+      out.value = d.apply(input1, input2);
     }
   }
 
@@ -165,8 +154,6 @@ public class StringDistanceFunctions {
    * <p>
    * Usage:  SELECT jaccard_distance( string1, string2 ) FROM ...
    */
-
-
   @FunctionTemplate(name = "jaccard_distance", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class JaccardDistanceFunction implements DrillSimpleFunc {
 
@@ -193,8 +180,7 @@ public class StringDistanceFunctions {
       String input1 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput1.start, rawInput1.end, rawInput1.buffer);
       String input2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput2.start, rawInput2.end, rawInput2.buffer);
 
-      double result = d.apply(input1, input2);
-      out.value = result;
+      out.value = d.apply(input1, input2);
     }
   }
 
@@ -210,7 +196,6 @@ public class StringDistanceFunctions {
    * <p>
    * Usage: SELECT jaro_distance( string1, string2 ) FROM...
    */
-
   @FunctionTemplate(name = "jaro_distance", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class JaroDistanceFunction implements DrillSimpleFunc {
 
@@ -237,8 +222,7 @@ public class StringDistanceFunctions {
       String input1 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput1.start, rawInput1.end, rawInput1.buffer);
       String input2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput2.start, rawInput2.end, rawInput2.buffer);
 
-      double result = d.apply(input1, input2);
-      out.value = result;
+      out.value = d.apply(input1, input2);
     }
   }
 
@@ -251,7 +235,6 @@ public class StringDistanceFunctions {
    * <p>
    * Usage: SELECT levenshtein_distance( string1, string2 ) FROM...
    */
-
   @FunctionTemplate(name = "levenshtein_distance", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class LevenstheinDistanceFunction implements DrillSimpleFunc {
 
@@ -278,8 +261,7 @@ public class StringDistanceFunctions {
       String input1 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput1.start, rawInput1.end, rawInput1.buffer);
       String input2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput2.start, rawInput2.end, rawInput2.buffer);
 
-      double result = d.apply(input1, input2);
-      out.value = result;
+      out.value = d.apply(input1, input2);
     }
   }
 
@@ -294,7 +276,6 @@ public class StringDistanceFunctions {
    * <p>
    * Usage:  SELECT longest_common_substring_distance( string1, string2 ) FROM...
    */
-
   @FunctionTemplate(name = "longest_common_substring_distance", scope = FunctionTemplate.FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
   public static class LongestCommonSubstringDistanceFunction implements DrillSimpleFunc {
 
@@ -321,8 +302,7 @@ public class StringDistanceFunctions {
       String input1 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput1.start, rawInput1.end, rawInput1.buffer);
       String input2 = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(rawInput2.start, rawInput2.end, rawInput2.buffer);
 
-      double result = d.apply(input1, input2);
-      out.value = result;
+      out.value = d.apply(input1, input2);
     }
   }
 
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsGeoJSON.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsGeoJSON.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsGeoJSON.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsGeoJSON.java
index 3f41504..4d01485 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsGeoJSON.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsGeoJSON.java
@@ -18,10 +18,9 @@
 /*
  * Wrapper for ESRI ST_AsGeoJson function to convert geometry to valid geojson
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -29,7 +28,7 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 import org.apache.drill.exec.expr.holders.VarCharHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
 @FunctionTemplate(name = "st_asgeojson", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsJson.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsJson.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsJson.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsJson.java
index e170833..090c78b 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsJson.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsJson.java
@@ -19,10 +19,9 @@
  * Wrapper for ESRI ST_AsJson to convert geometry into REST Json.
  * Emulates functionality from spatial-framework-for-hadoop.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -30,7 +29,7 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 import org.apache.drill.exec.expr.holders.VarCharHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
 @FunctionTemplate(name = "st_asjson", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsText.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsText.java
index 17b7858..f770872 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STAsText.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STAsText.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,7 +25,7 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 import org.apache.drill.exec.expr.holders.VarCharHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
 @FunctionTemplate(name = "st_astext", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STBuffer.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STBuffer.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STBuffer.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STBuffer.java
index a1d3af4..5ae0b4d 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STBuffer.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STBuffer.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
 * Returns a geometry that represents all points whose distance from this Geometry
 * is less than or equal to radius
 */
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STContains.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STContains.java
similarity index 94%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STContains.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STContains.java
index 5204ce6..94f0464 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STContains.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STContains.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  Returns true if and only if no points of B lie in the exterior of A,
  and at least one point of the interior of B lies in the interior of A.
 */
@@ -59,8 +58,6 @@ public class STContains implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int contains = geom1.contains(geom2) ? 1 : 0;
-
-    out.value = contains;
+    out.value = geom1.contains(geom2) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STCrosses.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STCrosses.java
similarity index 94%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STCrosses.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STCrosses.java
index 95d7aca..9bf678a 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STCrosses.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STCrosses.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns TRUE if the supplied geometries have some, but not all, interior points in common
  */
 @FunctionTemplate(name = "st_crosses", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -58,8 +57,6 @@ public class STCrosses implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int crosses = geom1.crosses(geom2) ? 1 : 0;
-
-    out.value = crosses;
+    out.value = geom1.crosses(geom2) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDWithin.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDWithin.java
similarity index 93%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDWithin.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDWithin.java
index 70f0947..b2ed091 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDWithin.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDWithin.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -27,7 +26,7 @@ import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
 @FunctionTemplate(name = "st_dwithin", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
@@ -61,8 +60,6 @@ public class STDWithin implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int isWithin = geom1.distance(geom2) <= distance ? 1 : 0;
-
-    out.value = isWithin;
+    out.value = geom1.distance(geom2) <= distance ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDifference.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDifference.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDifference.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDifference.java
index 4906880..bb51f0e 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDifference.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDifference.java
@@ -15,19 +15,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Given geometries A and B, this function returns a geometry that represents
  * the part of geometry A that does not intersect with geometry B
  */
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDisjoint.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDisjoint.java
similarity index 94%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDisjoint.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDisjoint.java
index 8a34241..5976e17 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDisjoint.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDisjoint.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns TRUE if two Geometries do not "spatially intersect" - if they do not share any space
  */
 @FunctionTemplate(name = "st_disjoint", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -58,8 +57,6 @@ public class STDisjoint implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int isDisjoint = geom1.disjoint(geom2) ? 1 : 0;
-
-    out.value = isDisjoint;
+    out.value = geom1.disjoint(geom2) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDistance.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDistance.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDistance.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDistance.java
index 9415f39..6235cd0 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDistance.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STDistance.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * For geometry type Returns the 2D Cartesian distance between two geometries in projected units (based on spatial ref).
  * For geography type defaults to return minimum geodesic distance between two geographies in meters
  */
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEnvelope.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STEnvelope.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEnvelope.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STEnvelope.java
index 8286224..8e73bcd 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEnvelope.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STEnvelope.java
@@ -15,19 +15,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns a geometry representing the double precision (float8) bounding box of the supplied geometry.
  * The polygon is defined by the corner points of the bounding box ((MINX, MINY), (MINX, MAXY), (MAXX, MAXY), (MAXX, MINY), (MINX, MINY))
  */
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEquals.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STEquals.java
similarity index 94%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEquals.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STEquals.java
index b5e22d6..81895a8 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEquals.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STEquals.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns true if the given geometries represent the same geometry. Directionality is ignored
  */
 @FunctionTemplate(name = "st_equals", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -58,8 +57,6 @@ public class STEquals implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int equals = geom1.equals(geom2) ? 1 : 0;
-
-    out.value = equals;
+    out.value = geom1.Equals(geom2) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STGeomFromText.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STGeomFromText.java
index 042046e..41044b5 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STGeomFromText.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,7 +25,7 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
 @FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STGeomFromTextSrid.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STGeomFromTextSrid.java
index 9a7432e..2ff2573 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STGeomFromTextSrid.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -27,7 +26,7 @@ import org.apache.drill.exec.expr.holders.NullableIntHolder;
 import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
 @FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STIntersects.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STIntersects.java
similarity index 94%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STIntersects.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STIntersects.java
index 9e152c5..621581d 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STIntersects.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STIntersects.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns TRUE if the Geometries/Geography "spatially intersect in 2D" - (share any portion of space) and FALSE if they don't (they are Disjoint)
  */
 @FunctionTemplate(name = "st_intersects", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -57,8 +56,6 @@ public class STIntersects implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int intersects = geom1.intersects(geom2) ? 1 : 0;
-
-    out.value = intersects;
+    out.value = geom1.intersects(geom2) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STOverlaps.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STOverlaps.java
similarity index 94%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STOverlaps.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STOverlaps.java
index 7ae770d..5f9a162 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STOverlaps.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STOverlaps.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns TRUE if the Geometries share space, are of the same dimension, but are not completely contained by each other
  */
 @FunctionTemplate(name = "st_overlaps", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -58,8 +57,6 @@ public class STOverlaps implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int overlaps = geom1.overlaps(geom2) ? 1 : 0;
-
-    out.value = overlaps;
+    out.value = geom1.overlaps(geom2) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STPointFunc.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STPointFunc.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STPointFunc.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STPointFunc.java
index 2024e3b..1b34f32 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STPointFunc.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STPointFunc.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,7 +25,7 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
 @FunctionTemplate(name = "st_point", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STRelate.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STRelate.java
similarity index 95%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STRelate.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STRelate.java
index 9a1c64b..fa81ee7 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STRelate.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STRelate.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -27,9 +26,9 @@ import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 import org.apache.drill.exec.expr.holders.VarCharHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  *  Returns true if this Geometry is spatially related to anotherGeometry, by testing for intersections between
  *  the Interior, Boundary and Exterior of the two geometries as specified by the values in the intersectionMatrixPattern.
  *  If no intersectionMatrixPattern is passed in, then returns the maximum intersectionMatrixPattern that relates the 2 geometries
@@ -66,8 +65,6 @@ public class STRelate implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int relates = geom1.relate(geom2, matrix) ? 1 : 0;
-
-    out.value = relates;
+    out.value = geom1.relate(geom2, matrix) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTouches.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STTouches.java
similarity index 94%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTouches.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STTouches.java
index 79ad0c7..0562aeb 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTouches.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STTouches.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns TRUE if the geometries have at least one point in common, but their interiors do not intersect
  */
 @FunctionTemplate(name = "st_touches", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -58,8 +57,6 @@ public class STTouches implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int touches = geom1.touches(geom2) ? 1 : 0;
-
-    out.value = touches;
+    out.value = geom1.touches(geom2) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTransform.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STTransform.java
similarity index 92%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTransform.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STTransform.java
index c2788c1..1b5c824 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTransform.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STTransform.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,12 +25,11 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.annotations.Workspace;
 import org.apache.drill.exec.expr.holders.NullableIntHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
-import org.osgeo.proj4j.CRSFactory;
 import org.osgeo.proj4j.CoordinateTransform;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Return a new geometry with its coordinates transformed to a different spatial reference
  */
 @FunctionTemplate(name = "st_transform", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -50,9 +48,6 @@ public class STTransform implements DrillSimpleFunc {
   CoordinateTransform transform;
 
   @Workspace
-  CRSFactory crsFactory;
-
-  @Workspace
   int sridTgt;
 
   @Output
@@ -78,12 +73,11 @@ public class STTransform implements DrillSimpleFunc {
     com.esri.core.geometry.ogc.OGCGeometry geomSrc = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
 
-    //int code = Integer.parseInt(transform.getTargetCRS().getName().substring(5, 9));//now sridTgt
     org.osgeo.proj4j.ProjCoordinate result = new org.osgeo.proj4j.ProjCoordinate();
     com.esri.core.geometry.SpatialReference sr = com.esri.core.geometry.SpatialReference.create(sridTgt);
-    java.nio.ByteBuffer geomBytes = null;
+    java.nio.ByteBuffer geomBytes;
 
-    if (geomSrc != null && geomSrc.geometryType().equals("Point")) {
+    if (geomSrc.geometryType().equals("Point")) {
       com.esri.core.geometry.ogc.OGCPoint pointGeom = (com.esri.core.geometry.ogc.OGCPoint) geomSrc;
       result = transform.transform(new org.osgeo.proj4j.ProjCoordinate(pointGeom.X(), pointGeom.Y()), result);
 
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnion.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STUnion.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnion.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STUnion.java
index 907c6da..24e934a 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnion.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STUnion.java
@@ -15,19 +15,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns a geometry that represents the point set union of the Geometries
  */
 @FunctionTemplate(name = "st_union", scope = FunctionTemplate.FunctionScope.SIMPLE,
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnionAggregate.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STUnionAggregate.java
similarity index 77%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnionAggregate.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STUnionAggregate.java
index bcb7615..32b90ba 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnionAggregate.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STUnionAggregate.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillAggFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -30,13 +29,14 @@ import org.apache.drill.exec.expr.holders.NullableVarBinaryHolder;
 import org.apache.drill.exec.expr.holders.ObjectHolder;
 import org.apache.drill.exec.expr.holders.UInt1Holder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns a geometry that represents the point set union of the Geometries
  */
 @FunctionTemplate(name = "st_unionaggregate", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
 public class STUnionAggregate implements DrillAggFunc {
+
   @Param NullableVarBinaryHolder in;
   @Workspace ObjectHolder value;
   @Workspace UInt1Holder init;
@@ -56,25 +56,23 @@ public class STUnionAggregate implements DrillAggFunc {
 
   @Override
   public void add() {
-    sout: {
-      if (in.isSet == 0) {
-        // processing nullable input and the value is null, so don't do anything...
-        break sout;
-      }
-      nonNullCount.value = 1;
-      java.util.ArrayList<com.esri.core.geometry.Geometry> tmp = (java.util.ArrayList<com.esri.core.geometry.Geometry>) value.obj;
 
-      com.esri.core.geometry.ogc.OGCGeometry geom;
-      geom = com.esri.core.geometry.ogc.OGCGeometry
-          .fromBinary(in.buffer.nioBuffer(in.start, in.end - in.start));
+    if (in.isSet == 0) {
+      // processing nullable input and the value is null, so don't do anything...
+      return;
+    }
+    nonNullCount.value = 1;
+    java.util.List<com.esri.core.geometry.Geometry> tmp = (java.util.ArrayList<com.esri.core.geometry.Geometry>) value.obj;
+
+    com.esri.core.geometry.ogc.OGCGeometry geom;
+    geom = com.esri.core.geometry.ogc.OGCGeometry.fromBinary(in.buffer.nioBuffer(in.start, in.end - in.start));
 
-      tmp.add(geom.getEsriGeometry());
+    tmp.add(geom.getEsriGeometry());
 
-      if(init.value == 0) {
-        init.value = 1;
-        srid.value = geom.SRID();
-      }
-    } // end of sout block
+    if (init.value == 0) {
+      init.value = 1;
+      srid.value = geom.SRID();
+    }
   }
 
   @Override
@@ -82,14 +80,14 @@ public class STUnionAggregate implements DrillAggFunc {
     if (nonNullCount.value > 0) {
       out.isSet = 1;
 
-      java.util.ArrayList<com.esri.core.geometry.Geometry> tmp = (java.util.ArrayList<com.esri.core.geometry.Geometry>) value.obj;
+      java.util.List<com.esri.core.geometry.Geometry> tmp = (java.util.ArrayList<com.esri.core.geometry.Geometry>) value.obj;
 
       com.esri.core.geometry.SpatialReference spatialRef = null;
       if (srid.value != 0){
         spatialRef = com.esri.core.geometry.SpatialReference.create(4326);
       }
       com.esri.core.geometry.Geometry[] geomArr =
-          (com.esri.core.geometry.Geometry[]) tmp.toArray( new com.esri.core.geometry.Geometry[0] );
+          (com.esri.core.geometry.Geometry[]) tmp.toArray(new com.esri.core.geometry.Geometry[0]);
       com.esri.core.geometry.Geometry geom = com.esri.core.geometry.GeometryEngine.union(geomArr, spatialRef);
 
       com.esri.core.geometry.ogc.OGCGeometry unionGeom = com.esri.core.geometry.ogc.OGCGeometry.createFromEsriGeometry(geom, spatialRef);
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STWithin.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STWithin.java
similarity index 94%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STWithin.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STWithin.java
index f229c63..ab82f0f 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STWithin.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STWithin.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,7 +25,7 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
 @FunctionTemplate(name = "st_within", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
@@ -55,8 +54,6 @@ public class STWithin implements DrillSimpleFunc {
     geom2 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int isWithin = geom1.within(geom2) ? 1 : 0;
-
-    out.value = isWithin;
+    out.value = geom1.within(geom2) ? 1 : 0;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXFunc.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXFunc.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXFunc.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXFunc.java
index 8986bbc..72b04bc 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXFunc.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXFunc.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Return the X coordinate of the point, or NaN if not available
  */
 @FunctionTemplate(name = "st_x", scope = FunctionTemplate.FunctionScope.SIMPLE,
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMax.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXMax.java
similarity index 95%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMax.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXMax.java
index ee8afa0..79cb679 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMax.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXMax.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns X maxima of a bounding box 2d or 3d or a geometry
  */
 @FunctionTemplate(name = "st_xmax", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -51,7 +50,6 @@ public class STXMax implements DrillSimpleFunc {
     geom1 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
 
-    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
     if (geom1.geometryType().equals("Point")) {
       out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom1).X();
     } else {
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMin.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXMin.java
similarity index 95%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMin.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXMin.java
index d527e6e..3119c87 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMin.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STXMin.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns X minima of a bounding box 2d or 3d or a geometry
  */
 @FunctionTemplate(name = "st_xmin", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -51,7 +50,6 @@ public class STXMin implements DrillSimpleFunc {
     geom1 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
 
-    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
     if (geom1.geometryType().equals("Point")) {
       out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom1).X();
     } else {
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYFunc.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYFunc.java
similarity index 97%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYFunc.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYFunc.java
index ce1771a..8799759 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYFunc.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYFunc.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Return the Y coordinate of the point, or NaN if not available
  */
 @FunctionTemplate(name = "st_y", scope = FunctionTemplate.FunctionScope.SIMPLE,
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMax.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYMax.java
similarity index 95%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMax.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYMax.java
index 4101e79..933536c 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMax.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYMax.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns Y maxima of a bounding box 2d or 3d or a geometry
  */
 @FunctionTemplate(name = "st_ymax", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -51,7 +50,6 @@ public class STYMax implements DrillSimpleFunc {
     geom1 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
 
-    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
     if (geom1.geometryType().equals("Point")) {
       out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom1).Y();
     } else {
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMin.java b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYMin.java
similarity index 95%
rename from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMin.java
rename to contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYMin.java
index 85fc7e4..2c8eab9 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMin.java
+++ b/contrib/udfs/src/main/java/org/apache/drill/exec/udfs/gis/STYMin.java
@@ -15,10 +15,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
-
-import javax.inject.Inject;
+package org.apache.drill.exec.udfs.gis;
 
+import io.netty.buffer.DrillBuf;
 import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
@@ -26,9 +25,9 @@ import org.apache.drill.exec.expr.annotations.Param;
 import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
-import io.netty.buffer.DrillBuf;
+import javax.inject.Inject;
 
-/*
+/**
  * Returns Y minima of a bounding box 2d or 3d or a geometry
  */
 @FunctionTemplate(name = "st_ymin", scope = FunctionTemplate.FunctionScope.SIMPLE,
@@ -51,7 +50,6 @@ public class STYMin implements DrillSimpleFunc {
     geom1 = com.esri.core.geometry.ogc.OGCGeometry
         .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
 
-    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
     if (geom1.geometryType().equals("Point")) {
       out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom1).Y();
     } else {
diff --git a/contrib/udfs/src/main/resources/drill-module.conf b/contrib/udfs/src/main/resources/drill-module.conf
new file mode 100644
index 0000000..741aac0
--- /dev/null
+++ b/contrib/udfs/src/main/resources/drill-module.conf
@@ -0,0 +1,20 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+//  This file tells Drill to consider this module when class path scanning.
+//  This file can also include any supplementary configuration information.
+//  This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md for more information.
+
+drill.classpath.scanning.packages += "org.apache.drill.exec.udfs"
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCryptoFunctions.java b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestCryptoFunctions.java
similarity index 98%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCryptoFunctions.java
rename to contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestCryptoFunctions.java
index d7d6047..2606c46 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestCryptoFunctions.java
+++ b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestCryptoFunctions.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.fn.impl;
+package org.apache.drill.exec.udfs;
 
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlFunctionTest;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNetworkFunctions.java b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestNetworkFunctions.java
similarity index 96%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNetworkFunctions.java
rename to contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestNetworkFunctions.java
index 8733c9a..1e056b0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestNetworkFunctions.java
+++ b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestNetworkFunctions.java
@@ -15,11 +15,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.fn.impl;
+package org.apache.drill.exec.udfs;
 
+import org.apache.drill.categories.SqlFunctionTest;
+import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.test.BaseTestQuery;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category({UnlikelyTest.class, SqlFunctionTest.class})
 public class TestNetworkFunctions extends BaseTestQuery {
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestPhoneticFunctions.java b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestPhoneticFunctions.java
similarity index 96%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestPhoneticFunctions.java
rename to contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestPhoneticFunctions.java
index 85bb135..d41435d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestPhoneticFunctions.java
+++ b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestPhoneticFunctions.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.fn.impl;
+package org.apache.drill.exec.udfs;
 
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
@@ -23,7 +23,6 @@ import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterFixtureBuilder;
 import org.apache.drill.test.ClusterTest;
-import org.apache.drill.test.QueryResultSet;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
@@ -34,8 +33,6 @@ import static org.junit.Assert.assertEquals;
 @Category({UnlikelyTest.class, SqlFunctionTest.class})
 public class TestPhoneticFunctions extends ClusterTest {
 
-  private QueryResultSet result;
-
   @Rule
   public final BaseDirTestWatcher baseDirTestWatcher = new BaseDirTestWatcher();
 
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestStringDistanceFunctions.java b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestStringDistanceFunctions.java
similarity index 98%
rename from exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestStringDistanceFunctions.java
rename to contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestStringDistanceFunctions.java
index 915c062..e90c0b6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestStringDistanceFunctions.java
+++ b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/TestStringDistanceFunctions.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.drill.exec.fn.impl;
+package org.apache.drill.exec.udfs;
 
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.categories.UnlikelyTest;
diff --git a/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/TestGeometryFunctions.java b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/gis/TestGeometryFunctions.java
similarity index 95%
rename from contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/TestGeometryFunctions.java
rename to contrib/udfs/src/test/java/org/apache/drill/exec/udfs/gis/TestGeometryFunctions.java
index 379de5c..93617d9 100644
--- a/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/TestGeometryFunctions.java
+++ b/contrib/udfs/src/test/java/org/apache/drill/exec/udfs/gis/TestGeometryFunctions.java
@@ -15,16 +15,20 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.drill.exec.expr.fn.impl.gis;
+package org.apache.drill.exec.udfs.gis;
 
+import org.apache.drill.categories.SqlFunctionTest;
+import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.test.BaseTestQuery;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category({UnlikelyTest.class, SqlFunctionTest.class})
 public class TestGeometryFunctions extends BaseTestQuery {
 
-  String wktPoint = "POINT (-121.895 37.339)";
-  String json = "{\"x\":-121.895,\"y\":37.339,\"spatialReference\":{\"wkid\":4326}}";
-  String geoJson = "{\"type\":\"Point\",\"coordinates\":[-121.895,37.339],"
+  private final String wktPoint = "POINT (-121.895 37.339)";
+  private final String json = "{\"x\":-121.895,\"y\":37.339,\"spatialReference\":{\"wkid\":4326}}";
+  private final String geoJson = "{\"type\":\"Point\",\"coordinates\":[-121.895,37.339],"
     + "\"crs\":{\"type\":\"name\",\"properties\":{\"name\":\"EPSG:4326\"}}}";
 
   @Test
@@ -292,7 +296,7 @@ public class TestGeometryFunctions extends BaseTestQuery {
   }
 
   @Test
-  public void testTransfromQuery() throws Exception {
+  public void testTransformQuery() throws Exception {
     double targetX = -71.1776848522251;
     double targetY = 42.3902896512902;
 
diff --git a/distribution/pom.xml b/distribution/pom.xml
index 19f41f2..35a274a 100644
--- a/distribution/pom.xml
+++ b/distribution/pom.xml
@@ -266,14 +266,14 @@
           <artifactId>drill-storage-hive-core</artifactId>
           <version>${project.version}</version>
         </dependency>
-        <dependency>
+	      <dependency>
           <groupId>org.apache.drill.contrib</groupId>
-          <artifactId>drill-gis</artifactId>
+          <artifactId>drill-storage-kafka</artifactId>
           <version>${project.version}</version>
         </dependency>
-	<dependency>
+        <dependency>
           <groupId>org.apache.drill.contrib</groupId>
-          <artifactId>drill-storage-kafka</artifactId>
+          <artifactId>drill-udfs</artifactId>
           <version>${project.version}</version>
         </dependency>
       </dependencies>
diff --git a/distribution/src/assemble/bin.xml b/distribution/src/assemble/bin.xml
index 712f3ec..1db6804 100644
--- a/distribution/src/assemble/bin.xml
+++ b/distribution/src/assemble/bin.xml
@@ -102,9 +102,9 @@
         <include>org.apache.drill.contrib:drill-format-mapr</include>
         <include>org.apache.drill.contrib:drill-jdbc-storage</include>
         <include>org.apache.drill.contrib:drill-kudu-storage</include>
-        <include>org.apache.drill.contrib:drill-gis</include>
         <include>org.apache.drill.contrib:drill-storage-kafka</include>
         <include>org.apache.drill.contrib:drill-opentsdb-storage</include>
+        <include>org.apache.drill.contrib:drill-udfs</include>
       </includes>
       <excludes>
         <exclude>org.apache.drill.contrib.storage-hive:drill-storage-hive-core:jar:tests</exclude>
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 6c4e296..7aab724 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -99,11 +99,6 @@
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
-      <artifactId>commons-text</artifactId>
-      <version>1.4</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.commons</groupId>
       <artifactId>commons-math</artifactId>
       <version>2.2</version>
     </dependency>
@@ -355,22 +350,6 @@
       <version>2.9</version>
     </dependency>
     <dependency>
-      <groupId>commons-net</groupId>
-      <artifactId>commons-net</artifactId>
-      <version>3.6</version>
-    </dependency>
-    <dependency>
-      <groupId>commons-validator</groupId>
-      <artifactId>commons-validator</artifactId>
-      <version>1.4.1</version>
-      <exclusions>
-        <exclusion>
-          <groupId>commons-logging</groupId>
-          <artifactId>commons-logging</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <exclusions>
diff --git a/exec/jdbc-all/pom.xml b/exec/jdbc-all/pom.xml
index 453703a..91ad39a 100644
--- a/exec/jdbc-all/pom.xml
+++ b/exec/jdbc-all/pom.xml
@@ -165,14 +165,6 @@
           <artifactId>libpam4j</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>commons-net</groupId>
-          <artifactId>commons-net</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>commons-validator</groupId>
-          <artifactId>commons-validator</artifactId>
-        </exclusion>
-        <exclusion>
           <artifactId>metadata-extractor</artifactId>
           <groupId>com.drewnoakes</groupId>
         </exclusion>


[drill] 02/03: DRILL-6631: Streaming agg causes queries with Lateral and Unnest to return incorrect results.

Posted by ti...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

timothyfarkas pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit ee841643d0f2d746126f623d6a0c480b3011d38f
Author: Parth Chandra <pa...@apache.org>
AuthorDate: Fri Jul 20 17:24:38 2018 -0700

    DRILL-6631: Streaming agg causes queries with Lateral and Unnest to return incorrect results.
    
    This commit fixes issues with handling straight aggregates (no group by)
    with empty batches received between EMIT(s).
    
    closes #1399
---
 .../physical/impl/aggregate/StreamingAggBatch.java |  39 +-
 .../impl/aggregate/StreamingAggTemplate.java       |   2 +-
 .../impl/agg/TestStreamingAggEmitOutcome.java      | 553 +++++++++++++++++++++
 3 files changed, 573 insertions(+), 21 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
index 882c36d..70880c6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
@@ -188,16 +188,19 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
   public IterOutcome innerNext() {
 
     // if a special batch has been sent, we have no data in the incoming so exit early
-    if ( done || specialBatchSent) {
+    if (done || specialBatchSent) {
+      assert (sendEmit != true); // if special batch sent with emit then flag will not be set
       return NONE;
     }
 
     // We sent an OK_NEW_SCHEMA and also encountered the end of a data set. So we need to send
     // an EMIT with an empty batch now
     if (sendEmit) {
+      first = false; // first is set only in the case when we see a NONE after an empty first (and only) batch
       sendEmit = false;
       firstBatchForDataSet = true;
       recordCount = 0;
+      specialBatchSent = false;
       return EMIT;
     }
 
@@ -212,15 +215,19 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
       logger.debug("Next outcome of {}", lastKnownOutcome);
       switch (lastKnownOutcome) {
         case NONE:
-          if (firstBatchForDataSet && popConfig.getKeys().size() == 0) {
+
+          if (first && popConfig.getKeys().size() == 0) {
             // if we have a straight aggregate and empty input batch, we need to handle it in a different way
+            // We want to produce the special batch only if we got a NONE as the first outcome after
+            // OK_NEW_SCHEMA. If we get a NONE immediately after we see an EMIT, then we have already handled
+            // the case of the empty batch
             constructSpecialBatch();
             // set state to indicate the fact that we have sent a special batch and input is empty
             specialBatchSent = true;
             // If outcome is NONE then we send the special batch in the first iteration and the NONE
             // outcome in the next iteration. If outcome is EMIT, we can send the special
             // batch and the EMIT outcome at the same time.
-            return getFinalOutcome();
+            return IterOutcome.OK;
           }
           // else fall thru
         case OUT_OF_MEMORY:
@@ -238,13 +245,12 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
           // we have to do the special handling
           if (firstBatchForDataSet && popConfig.getKeys().size() == 0 && incoming.getRecordCount() == 0) {
             constructSpecialBatch();
-            // set state to indicate the fact that we have sent a special batch and input is empty
-            specialBatchSent = true;
             firstBatchForDataSet = true; // reset on the next iteration
             // If outcome is NONE then we send the special batch in the first iteration and the NONE
             // outcome in the next iteration. If outcome is EMIT, we can send the special
-            // batch and the EMIT outcome at the same time.
-            return getFinalOutcome();
+            // batch and the EMIT outcome at the same time. (unless the finalOutcome is OK_NEW_SCHEMA)
+            IterOutcome finalOutcome =  getFinalOutcome();
+            return finalOutcome;
           }
           // else fall thru
         case OK:
@@ -269,13 +275,6 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
           }
         }
       }
-      // We sent an EMIT in the previous iteration, so we must be starting a new data set
-      if (firstBatchForDataSet) {
-        done = false;
-        sendEmit = false;
-        specialBatchSent = false;
-        firstBatchForDataSet = false;
-      }
     }
     AggOutcome aggOutcome = aggregator.doWork(lastKnownOutcome);
     recordCount = aggregator.getOutputCount();
@@ -296,14 +295,15 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
         if (firstBatchForDataSet && popConfig.getKeys().size() == 0 && recordCount == 0) {
           // if we have a straight aggregate and empty input batch, we need to handle it in a different way
           constructSpecialBatch();
-          // set state to indicate the fact that we have sent a special batch and input is empty
-          specialBatchSent = true;
           // If outcome is NONE then we send the special batch in the first iteration and the NONE
           // outcome in the next iteration. If outcome is EMIT, we can send the special
           // batch and the EMIT outcome at the same time.
-          return getFinalOutcome();
+
+          IterOutcome finalOutcome =  getFinalOutcome();
+          return finalOutcome;
         }
         firstBatchForDataSet = true;
+        firstBatchForSchema = false;
         if(first) {
           first = false;
         }
@@ -332,9 +332,8 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
           }
         } else if (lastKnownOutcome == OK && first) {
           lastKnownOutcome = OK_NEW_SCHEMA;
-        } else if (lastKnownOutcome != IterOutcome.OUT_OF_MEMORY) {
-          first = false;
         }
+        first = false;
         return lastKnownOutcome;
       case UPDATE_AGGREGATOR:
         // We could get this either between data sets or within a data set.
@@ -629,12 +628,12 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
     }
     if (firstBatchForSchema) {
       outcomeToReturn = OK_NEW_SCHEMA;
+      sendEmit = true;
       firstBatchForSchema = false;
     } else if (lastKnownOutcome == EMIT) {
       firstBatchForDataSet = true;
       outcomeToReturn = EMIT;
     } else {
-      // get the outcome to return before calling refresh since that resets the lastKnowOutcome to OK
       outcomeToReturn = (recordCount == 0) ? NONE : OK;
     }
     return outcomeToReturn;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
index a752c7e..9165850 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggTemplate.java
@@ -40,7 +40,7 @@ public abstract class StreamingAggTemplate implements StreamingAggregator {
 
   // First batch after build schema phase
   private boolean first = true;
-  private boolean firstBatchForSchema = true; // true if the current batch came in with an OK_NEW_SCHEMA.
+  private boolean firstBatchForSchema = false; // true if the current batch came in with an OK_NEW_SCHEMA.
   private boolean firstBatchForDataSet = true; // true if the current batch is the first batch in a data set
 
   private boolean newSchema = false;
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestStreamingAggEmitOutcome.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestStreamingAggEmitOutcome.java
index 75c4598..2183efa 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestStreamingAggEmitOutcome.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestStreamingAggEmitOutcome.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.physical.impl.agg;
 
 import org.apache.drill.categories.OperatorTest;
+import org.apache.drill.common.logical.data.NamedExpression;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.physical.config.StreamingAggregate;
 import org.apache.drill.exec.physical.impl.BaseTestOpBatchEmitOutcome;
@@ -33,6 +34,8 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.util.ArrayList;
+
 import static org.apache.drill.exec.record.RecordBatch.IterOutcome.EMIT;
 import static org.apache.drill.exec.record.RecordBatch.IterOutcome.OK_NEW_SCHEMA;
 import static org.junit.Assert.assertEquals;
@@ -42,6 +45,7 @@ import static org.junit.Assert.assertTrue;
 public class TestStreamingAggEmitOutcome extends BaseTestOpBatchEmitOutcome {
   //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestStreamingAggEmitOutcome.class);
   protected static TupleMetadata resultSchema;
+  protected static TupleMetadata resultSchemaNoGroupBy;
 
   @BeforeClass
   public static void setUpBeforeClass2() throws Exception {
@@ -49,6 +53,9 @@ public class TestStreamingAggEmitOutcome extends BaseTestOpBatchEmitOutcome {
         .add("name", TypeProtos.MinorType.VARCHAR)
         .addNullable("total_sum", TypeProtos.MinorType.BIGINT)
         .buildSchema();
+    resultSchemaNoGroupBy = new SchemaBuilder()
+        .addNullable("total_sum", TypeProtos.MinorType.BIGINT)
+        .buildSchema();
   }
 
   /**
@@ -611,4 +618,550 @@ public class TestStreamingAggEmitOutcome extends BaseTestOpBatchEmitOutcome {
     assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
     assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
   }
+
+  /*******************************************************
+   * Tests for EMIT with empty batches and no group by
+   * (Tests t1-t10 are repeated with no group by)
+   *******************************************************/
+
+
+  /**
+   * Repeats t1_testStreamingAggrEmptyBatchEmitOutcome with no group by
+   */
+  @Test
+  public void t11_testStreamingAggrEmptyBatchEmitOutcome() {
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+
+    inputOutcomes.add(OK_NEW_SCHEMA);
+    inputOutcomes.add(OK_NEW_SCHEMA);
+    inputOutcomes.add(EMIT);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(0, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+  }
+
+  /**
+   Repeats t2_testStreamingAggrNonEmptyBatchEmitOutcome with no group by
+   */
+  @Test
+  public void t12_testStreamingAggrNonEmptyBatchEmitOutcome() {
+    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(13, 130, "item13")
+        .addRow(13, 130, "item13")
+        .addRow(2, 20, "item2")
+        .addRow(2, 20, "item2")
+        .addRow(4, 40, "item4")
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)385)
+        .build();
+
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet2.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    // Data before EMIT is returned with an OK_NEW_SCHEMA.
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    RowSet actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet).verify(actualRowSet);
+
+    // EMIT comes with an empty batch
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+
+    // Release memory for row sets
+    nonEmptyInputRowSet2.clear();
+    expectedRowSet.clear();
+  }
+
+  /**
+   Repeats t3_testStreamingAggrEmptyBatchFollowedByNonEmptyBatchEmitOutcome with no group by
+   */
+  @Test
+  public void t13_testStreamingAggrEmptyBatchFollowedByNonEmptyBatchEmitOutcome() {
+    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(13, 130, "item13")
+        .addRow(0, 1300, "item13")
+        .addRow(2, 20, "item2")
+        .addRow(0, 2000, "item2")
+        .addRow(4, 40, "item4")
+        .addRow(0, 4000, "item4")
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)7509)
+        .build();
+
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet2.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(0, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    RowSet actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet).verify(actualRowSet);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+
+    // Release memory for row sets
+    nonEmptyInputRowSet2.clear();
+    expectedRowSet.clear();
+  }
+
+  /**
+   Repeats t4_testStreamingAggrMultipleEmptyBatchFollowedByNonEmptyBatchEmitOutcome with no group by
+   */
+  @Test
+  public void t14_testStreamingAggrMultipleEmptyBatchFollowedByNonEmptyBatchEmitOutcome() {
+    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(13, 130, "item13")
+        .addRow(0, 0, "item13")
+        .addRow(1, 33000, "item13")
+        .addRow(2, 20, "item2")
+        .addRow(0, 0, "item2")
+        .addRow(1, 11000, "item2")
+        .addRow(4, 40, "item4")
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)44211)
+        .build();
+
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet2.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(0, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(1, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(1, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    RowSet actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet).verify(actualRowSet);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+
+    // Release memory for row sets
+    nonEmptyInputRowSet2.clear();
+    expectedRowSet.clear();
+  }
+
+  /**
+   Repeats t5_testStreamingAgrResetsAfterFirstEmitOutcome with no group by
+   */
+  @Test
+  public void t15_testStreamingAgrResetsAfterFirstEmitOutcome() {
+    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(2, 20, "item2")
+        .addRow(2, 20, "item2")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .addRow(3, 30, "item3")
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet1 = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)11)
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet2 = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)374)
+        .build();
+
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet2.container());
+    inputContainer.add(emptyInputRowSet.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    RowSet actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet1).verify(actualRowSet);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(0, strAggBatch.getRecordCount());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet2).verify(actualRowSet);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+
+    // Release memory for row sets
+    nonEmptyInputRowSet2.clear();
+    expectedRowSet2.clear();
+    expectedRowSet1.clear();
+  }
+
+  /**
+   Repeats t6_testStreamingAggrOkFollowedByNone with no group by
+   */
+  @Test
+  public void t16_testStreamingAggrOkFollowedByNone() {
+    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(2, 20, "item2")
+        .addRow(3, 30, "item3")
+        .addRow(4, 40, "item4")
+        .addRow(4, 40, "item4")
+        .addRow(5, 50, "item5")
+        .addRow(5, 50, "item5")
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet1 = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)11)
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet2 = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)253)
+        .build();
+
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet2.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    RowSet actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet1).verify(actualRowSet);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(0, strAggBatch.getRecordCount());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet2).verify(actualRowSet);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+
+    // Release memory for row sets
+    nonEmptyInputRowSet2.clear();
+    expectedRowSet2.clear();
+    expectedRowSet1.clear();
+  }
+
+  /**
+   Repeats t7_testStreamingAggrMultipleEMITOutcome with no group by
+   */
+  @Test
+  public void t17_testStreamingAggrMultipleEMITOutcome() {
+    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(2, 20, "item2")
+        .addRow(3, 30, "item3")
+        .build();
+
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet2.container());
+    inputContainer.add(emptyInputRowSet.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertEquals(1, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(0, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(1, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(1, strAggBatch.getRecordCount());
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+
+    nonEmptyInputRowSet2.clear();
+  }
+
+  /**
+   Repeats t8_testStreamingAggrMultipleInputToSingleOutputBatch with no group by
+   */
+  @Test
+  public void t18_testStreamingAggrMultipleInputToSingleOutputBatch() {
+
+    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(2, 20, "item2")
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)33)
+        .build();
+
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet2.container());
+    inputContainer.add(emptyInputRowSet.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+    inputOutcomes.add(RecordBatch.IterOutcome.EMIT);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    RowSet actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet).verify(actualRowSet);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.EMIT);
+    assertEquals(0, strAggBatch.getRecordCount());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+
+    nonEmptyInputRowSet2.clear();
+  }
+
+
+  /**
+   Repeats t9_testStreamingAgr_WithEmptyNonEmptyBatchesAndOKOutcome with no group by
+   */
+  @Test
+  public void t19_testStreamingAgr_WithEmptyNonEmptyBatchesAndOKOutcome() {
+    final RowSet.SingleRowSet nonEmptyInputRowSet2 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(2, 20, "item1")
+        .addRow(13, 130, "item13")
+        .addRow(13, 130, "item13")
+        .addRow(13, 130, "item13")
+        .addRow(130, 1300, "item130")
+        .addRow(0, 0, "item130")
+        .build();
+
+    final RowSet.SingleRowSet nonEmptyInputRowSet3 = operatorFixture.rowSetBuilder(inputSchema)
+        .addRow(23, 230, "item23")
+        .addRow(3, 33, "item3")
+        .addRow(7, 70, "item7")
+        .addRow(17, 170, "item7")
+        .build();
+
+    final RowSet.SingleRowSet expectedRowSet = operatorFixture.rowSetBuilder(resultSchemaNoGroupBy)
+        .addRow((long)2445)
+        .build();
+
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet2.container());
+    inputContainer.add(emptyInputRowSet.container());
+    inputContainer.add(nonEmptyInputRowSet3.container());
+    inputContainer.add(emptyInputRowSet.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+    inputOutcomes.add(RecordBatch.IterOutcome.OK);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertEquals(1, strAggBatch.getRecordCount());
+
+    RowSet actualRowSet = DirectRowSet.fromContainer(strAggBatch.getContainer());
+    new RowSetComparison(expectedRowSet).verify(actualRowSet);
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+
+    nonEmptyInputRowSet2.clear();
+    nonEmptyInputRowSet3.clear();
+    expectedRowSet.clear();
+  }
+
+  /**
+   Repeats t10_testStreamingAggrWithEmptyDataSet with no group by
+   */
+  @Test
+  public void t20_testStreamingAggrWithEmptyDataSet() {
+    inputContainer.add(emptyInputRowSet.container());
+
+    inputOutcomes.add(RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+
+    final MockRecordBatch mockInputBatch = new MockRecordBatch(operatorFixture.getFragmentContext(), opContext,
+        inputContainer, inputOutcomes, emptyInputRowSet.container().getSchema());
+
+    final StreamingAggregate streamAggrConfig = new StreamingAggregate(null,
+        new ArrayList<NamedExpression>(),
+        parseExprs("sum(id_left+cost_left)", "total_sum"),
+        1.0f);
+
+    final StreamingAggBatch strAggBatch = new StreamingAggBatch(streamAggrConfig, mockInputBatch,
+        operatorFixture.getFragmentContext());
+
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK_NEW_SCHEMA);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.OK);
+    assertTrue(strAggBatch.next() == RecordBatch.IterOutcome.NONE);
+  }
+
 }


[drill] 03/03: DRILL-5796 : implement ROWS_MATCH enum to keep inside rowgroup the filter result information, used to prune the filter if all rows match.

Posted by ti...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

timothyfarkas pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit efd6d29910d155cd84692ee8aafff3eb06c6e391
Author: jbimbert <je...@amadeus.com>
AuthorDate: Tue Jun 12 19:22:20 2018 +0200

    DRILL-5796 : implement ROWS_MATCH enum to keep inside rowgroup the filter result information, used to prune the filter if all rows match.
    
    closes #1298
---
 .../exec/expr/stat/ParquetBooleanPredicate.java    |  48 ++-
 .../exec/expr/stat/ParquetComparisonPredicate.java |  78 ++---
 .../exec/expr/stat/ParquetFilterPredicate.java     |  13 +-
 .../drill/exec/expr/stat/ParquetIsPredicate.java   | 125 +++++---
 .../drill/exec/expr/stat/RangeExprEvaluator.java   |  33 +-
 .../store/parquet/AbstractParquetGroupScan.java    |   8 +-
 .../exec/store/parquet/ParquetPushDownFilter.java  |  18 +-
 .../store/parquet/ParquetRGFilterEvaluator.java    |  52 +++-
 .../drill/exec/store/parquet/RowGroupInfo.java     |   5 +
 .../parquet/stat/ParquetFooterStatCollector.java   |   2 +-
 .../parquet/stat/ParquetMetaStatCollector.java     |   2 +-
 .../store/parquet/TestParquetFilterPushDown.java   | 335 +++++++++++++++------
 .../test/resources/parquet/multirowgroup2.parquet  | Bin 0 -> 598 bytes
 .../parquet/multirowgroupwithNulls.parquet         | Bin 0 -> 2063 bytes
 .../resources/parquetFilterPush/tfTbl/ff1.parquet  | Bin 0 -> 251 bytes
 .../resources/parquetFilterPush/tfTbl/ft0.parquet  | Bin 0 -> 251 bytes
 .../resources/parquetFilterPush/tfTbl/tt1.parquet  | Bin 0 -> 251 bytes
 17 files changed, 510 insertions(+), 209 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetBooleanPredicate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetBooleanPredicate.java
index fa5c467..f427dc6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetBooleanPredicate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetBooleanPredicate.java
@@ -46,15 +46,29 @@ public abstract class ParquetBooleanPredicate<C extends Comparable<C>> extends B
       ExpressionPosition pos
   ) {
     return new ParquetBooleanPredicate<C>(name, args, pos) {
+      /**
+       * Evaluates a compound "AND" filter on the statistics of a RowGroup (the filter reads "filterA and filterB").
+       * Return value :<ul>
+       *   <li>ALL : only if all filters return ALL
+       *   <li>NONE : if one filter at least returns NONE
+       *   <li>SOME : all other cases
+       * </ul>
+       */
       @Override
-      public boolean canDrop(RangeExprEvaluator<C> evaluator) {
-        // "and" : as long as one branch is OK to drop, we can drop it.
+      public RowsMatch matches(RangeExprEvaluator<C> evaluator) {
+        RowsMatch resultMatch = RowsMatch.ALL;
         for (LogicalExpression child : this) {
-          if (child instanceof ParquetFilterPredicate && ((ParquetFilterPredicate)child).canDrop(evaluator)) {
-            return true;
+          if (child instanceof ParquetFilterPredicate) {
+            switch (((ParquetFilterPredicate) child).matches(evaluator)) {
+              case NONE:
+                return RowsMatch.NONE;  // No rows match one part of the AND => the whole row group can be dropped
+              case SOME:
+                resultMatch = RowsMatch.SOME;
+              default: // Do nothing
+            }
           }
         }
-        return false;
+        return resultMatch;
       }
     };
   }
@@ -66,15 +80,29 @@ public abstract class ParquetBooleanPredicate<C extends Comparable<C>> extends B
       ExpressionPosition pos
   ) {
     return new ParquetBooleanPredicate<C>(name, args, pos) {
+      /**
+       * Evaluates a compound "OR" filter on the statistics of a RowGroup (the filter reads "filterA or filterB").
+       * Return value :<ul>
+       *   <li>NONE : only if all filters return NONE
+       *   <li>ALL : if one filter at least returns ALL
+       *   <li>SOME : all other cases
+       * </ul>
+       */
       @Override
-      public boolean canDrop(RangeExprEvaluator<C> evaluator) {
+      public RowsMatch matches(RangeExprEvaluator<C> evaluator) {
+        RowsMatch resultMatch = RowsMatch.NONE;
         for (LogicalExpression child : this) {
-          // "or" : as long as one branch is NOT ok to drop, we can NOT drop it.
-          if (!(child instanceof ParquetFilterPredicate) || !((ParquetFilterPredicate)child).canDrop(evaluator)) {
-            return false;
+          if (child instanceof ParquetFilterPredicate) {
+            switch (((ParquetFilterPredicate) child).matches(evaluator)) {
+              case ALL:
+                return RowsMatch.ALL;  // At least one branch of the OR is ALL => the filter can be dropped, but not the row group
+              case SOME:
+                resultMatch = RowsMatch.SOME;
+              default: // Do nothing
+            }
           }
         }
-        return true;
+        return resultMatch;
       }
     };
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetComparisonPredicate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetComparisonPredicate.java
index ebceefb..531cbab 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetComparisonPredicate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetComparisonPredicate.java
@@ -26,8 +26,9 @@ import org.apache.parquet.column.statistics.Statistics;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.function.BiPredicate;
+import java.util.function.BiFunction;
 
+import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.hasNoNulls;
 import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.isNullOrEmpty;
 import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.isAllNulls;
 
@@ -38,12 +39,13 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
     implements ParquetFilterPredicate<C> {
   private final LogicalExpression left;
   private final LogicalExpression right;
-  private final BiPredicate<Statistics<C>, Statistics<C>> predicate;
+
+  private final BiFunction<Statistics<C>, Statistics<C>, RowsMatch> predicate;
 
   private ParquetComparisonPredicate(
       LogicalExpression left,
       LogicalExpression right,
-      BiPredicate<Statistics<C>, Statistics<C>> predicate
+      BiFunction<Statistics<C>, Statistics<C>, RowsMatch> predicate
   ) {
     super(left.getPosition());
     this.left = left;
@@ -65,7 +67,7 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
   }
 
   /**
-   * Semantics of canDrop() is very similar to what is implemented in Parquet library's
+   * Semantics of matches() is very similar to what is implemented in Parquet library's
    * {@link org.apache.parquet.filter2.statisticslevel.StatisticsFilter} and
    * {@link org.apache.parquet.filter2.predicate.FilterPredicate}
    *
@@ -83,23 +85,29 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
    * where Column1 and Column2 are from same parquet table.
    */
   @Override
-  public boolean canDrop(RangeExprEvaluator<C> evaluator) {
+  public RowsMatch matches(RangeExprEvaluator<C> evaluator) {
     Statistics<C> leftStat = left.accept(evaluator, null);
     if (isNullOrEmpty(leftStat)) {
-      return false;
+      return RowsMatch.SOME;
     }
-
     Statistics<C> rightStat = right.accept(evaluator, null);
     if (isNullOrEmpty(rightStat)) {
-      return false;
+      return RowsMatch.SOME;
     }
-
-    // if either side is ALL null, = is evaluated to UNKNOWN -> canDrop
     if (isAllNulls(leftStat, evaluator.getRowCount()) || isAllNulls(rightStat, evaluator.getRowCount())) {
-      return true;
+      return RowsMatch.NONE;
+    }
+    if (!leftStat.hasNonNullValue() || !rightStat.hasNonNullValue()) {
+      return RowsMatch.SOME;
     }
+    return predicate.apply(leftStat, rightStat);
+  }
 
-    return (leftStat.hasNonNullValue() && rightStat.hasNonNullValue()) && predicate.test(leftStat, rightStat);
+  /**
+   * If the row group contains null values, downgrade RowsMatch.ALL to RowsMatch.SOME (null values must be discarded by the filter)
+   */
+  private static RowsMatch checkNull(Statistics leftStat, Statistics rightStat) {
+    return !hasNoNulls(leftStat) || !hasNoNulls(rightStat) ? RowsMatch.SOME : RowsMatch.ALL;
   }
 
   /**
@@ -109,12 +117,9 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
       LogicalExpression left,
       LogicalExpression right
   ) {
-    return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> {
-      // can drop when left's max < right's min, or right's max < left's min
-      final C leftMin = leftStat.genericGetMin();
-      final C rightMin = rightStat.genericGetMin();
-      return (leftStat.compareMaxToValue(rightMin) < 0) || (rightStat.compareMaxToValue(leftMin) < 0);
-    }) {
+    return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) ->
+      leftStat.compareMaxToValue(rightStat.genericGetMin()) < 0 || rightStat.compareMaxToValue(leftStat.genericGetMin()) < 0 ? RowsMatch.NONE : RowsMatch.SOME
+    ) {
       @Override
       public String toString() {
         return left + " = " + right;
@@ -130,9 +135,10 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
       LogicalExpression right
   ) {
     return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> {
-      // can drop when left's max <= right's min.
-      final C rightMin = rightStat.genericGetMin();
-      return leftStat.compareMaxToValue(rightMin) <= 0;
+      if (leftStat.compareMaxToValue(rightStat.genericGetMin()) <= 0) {
+        return RowsMatch.NONE;
+      }
+      return leftStat.compareMinToValue(rightStat.genericGetMax()) > 0 ? checkNull(leftStat, rightStat) : RowsMatch.SOME;
     });
   }
 
@@ -144,9 +150,10 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
       LogicalExpression right
   ) {
     return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> {
-      // can drop when left's max < right's min.
-      final C rightMin = rightStat.genericGetMin();
-      return leftStat.compareMaxToValue(rightMin) < 0;
+      if (leftStat.compareMaxToValue(rightStat.genericGetMin()) < 0) {
+        return RowsMatch.NONE;
+      }
+      return leftStat.compareMinToValue(rightStat.genericGetMax()) >= 0 ? checkNull(leftStat, rightStat) : RowsMatch.SOME;
     });
   }
 
@@ -158,9 +165,10 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
       LogicalExpression right
   ) {
     return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> {
-      // can drop when right's max <= left's min.
-      final C leftMin = leftStat.genericGetMin();
-      return rightStat.compareMaxToValue(leftMin) <= 0;
+      if (rightStat.compareMaxToValue(leftStat.genericGetMin()) <= 0) {
+        return RowsMatch.NONE;
+      }
+      return leftStat.compareMaxToValue(rightStat.genericGetMin()) < 0 ? checkNull(leftStat, rightStat) : RowsMatch.SOME;
     });
   }
 
@@ -171,9 +179,10 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
       LogicalExpression left, LogicalExpression right
   ) {
     return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> {
-      // can drop when right's max < left's min.
-      final C leftMin = leftStat.genericGetMin();
-      return rightStat.compareMaxToValue(leftMin) < 0;
+      if (rightStat.compareMaxToValue(leftStat.genericGetMin()) < 0) {
+        return RowsMatch.NONE;
+      }
+      return leftStat.compareMaxToValue(rightStat.genericGetMin()) <= 0 ? checkNull(leftStat, rightStat) : RowsMatch.SOME;
     });
   }
 
@@ -185,11 +194,10 @@ public class ParquetComparisonPredicate<C extends Comparable<C>> extends Logical
       LogicalExpression right
   ) {
     return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> {
-      // can drop when there is only one unique value.
-      final C leftMax = leftStat.genericGetMax();
-      final C rightMax = rightStat.genericGetMax();
-      return leftStat.compareMinToValue(leftMax) == 0 && rightStat.compareMinToValue(rightMax) == 0 &&
-          leftStat.compareMaxToValue(rightMax) == 0;
+      if (leftStat.compareMaxToValue(rightStat.genericGetMin()) < 0 || rightStat.compareMaxToValue(leftStat.genericGetMin()) < 0) {
+        return checkNull(leftStat, rightStat);
+      }
+      return leftStat.compareMaxToValue(rightStat.genericGetMax()) == 0 && leftStat.compareMinToValue(rightStat.genericGetMin()) == 0 ? RowsMatch.NONE : RowsMatch.SOME;
     });
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetFilterPredicate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetFilterPredicate.java
index 1b7e9e5..c472d48 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetFilterPredicate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetFilterPredicate.java
@@ -18,5 +18,16 @@
 package org.apache.drill.exec.expr.stat;
 
 public interface ParquetFilterPredicate<T extends Comparable<T>> {
-  boolean canDrop(RangeExprEvaluator<T> evaluator);
+
+  /**
+   * Define the validity of a row group against a filter
+   * <ul>
+   *   <li>ALL : all rows match the filter (can not drop the row group and can prune the filter)
+   *   <li>NONE : no row matches the filter (can drop the row group)
+   *   <li>SOME : only some rows match the filter, or the filter cannot be applied (can drop neither the row group nor the filter)
+   * </ul>
+   */
+  enum RowsMatch {ALL, NONE, SOME}
+
+  RowsMatch matches(RangeExprEvaluator<T> evaluator);
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetIsPredicate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetIsPredicate.java
index 42e6e0b..e69dd8b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetIsPredicate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/ParquetIsPredicate.java
@@ -19,7 +19,6 @@ package org.apache.drill.exec.expr.stat;
 
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.expression.LogicalExpressionBase;
-import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.expression.TypedFieldExpr;
 import org.apache.drill.common.expression.visitors.ExprVisitor;
 import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
@@ -29,7 +28,7 @@ import org.apache.parquet.column.statistics.Statistics;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.function.BiPredicate;
+import java.util.function.BiFunction;
 
 import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.hasNoNulls;
 import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.isAllNulls;
@@ -42,9 +41,10 @@ public class ParquetIsPredicate<C extends Comparable<C>> extends LogicalExpressi
     implements ParquetFilterPredicate<C> {
 
   private final LogicalExpression expr;
-  private final BiPredicate<Statistics<C>, RangeExprEvaluator<C>> predicate;
 
-  private ParquetIsPredicate(LogicalExpression expr, BiPredicate<Statistics<C>, RangeExprEvaluator<C>> predicate) {
+  private final BiFunction<Statistics<C>, RangeExprEvaluator<C>, RowsMatch> predicate;
+
+  private ParquetIsPredicate(LogicalExpression expr, BiFunction<Statistics<C>, RangeExprEvaluator<C>, RowsMatch> predicate) {
     super(expr.getPosition());
     this.expr = expr;
     this.predicate = predicate;
@@ -62,14 +62,22 @@ public class ParquetIsPredicate<C extends Comparable<C>> extends LogicalExpressi
     return visitor.visitUnknown(this, value);
   }
 
-  @Override
-  public boolean canDrop(RangeExprEvaluator<C> evaluator) {
+  /**
+   * Apply the filter condition against the metadata of the row group.
+   */
+  public RowsMatch matches(RangeExprEvaluator<C> evaluator) {
     Statistics<C> exprStat = expr.accept(evaluator, null);
-    if (isNullOrEmpty(exprStat)) {
-      return false;
-    }
+    return isNullOrEmpty(exprStat) ? RowsMatch.SOME : predicate.apply(exprStat, evaluator);
+  }
 
-    return predicate.test(exprStat, evaluator);
+  /**
+   * After the applying of the filter against the statistics of the rowgroup, if the result is RowsMatch.ALL,
+   * then we still must know if the rowgroup contains some null values, because it can change the filter result.
+   * If it contains some null values, then we change the RowsMatch.ALL into RowsMatch.SOME, which says that maybe
+   * some values (the null ones) should be discarded.
+   */
+  private static RowsMatch checkNull(Statistics exprStat) {
+    return hasNoNulls(exprStat) ? RowsMatch.ALL : RowsMatch.SOME;
   }
 
   /**
@@ -77,26 +85,20 @@ public class ParquetIsPredicate<C extends Comparable<C>> extends LogicalExpressi
    */
   private static <C extends Comparable<C>> LogicalExpression createIsNullPredicate(LogicalExpression expr) {
     return new ParquetIsPredicate<C>(expr,
-        //if there are no nulls  -> canDrop
-        (exprStat, evaluator) -> hasNoNulls(exprStat)) {
-      private final boolean isArray = isArray(expr);
-
-      private boolean isArray(LogicalExpression expression) {
-        if (expression instanceof TypedFieldExpr) {
-          TypedFieldExpr typedFieldExpr = (TypedFieldExpr) expression;
-          SchemaPath schemaPath = typedFieldExpr.getPath();
-          return schemaPath.isArray();
-        }
-        return false;
-      }
-
-      @Override
-      public boolean canDrop(RangeExprEvaluator<C> evaluator) {
+      (exprStat, evaluator) -> {
         // for arrays we are not able to define exact number of nulls
         // [1,2,3] vs [1,2] -> in second case 3 is absent and thus it's null but statistics shows no nulls
-        return !isArray && super.canDrop(evaluator);
-      }
-    };
+        if (expr instanceof TypedFieldExpr) {
+          TypedFieldExpr typedFieldExpr = (TypedFieldExpr) expr;
+          if (typedFieldExpr.getPath().isArray()) {
+            return RowsMatch.SOME;
+          }
+        }
+        if (hasNoNulls(exprStat)) {
+          return RowsMatch.NONE;
+        }
+        return isAllNulls(exprStat, evaluator.getRowCount()) ? RowsMatch.ALL : RowsMatch.SOME;
+      });
   }
 
   /**
@@ -104,8 +106,7 @@ public class ParquetIsPredicate<C extends Comparable<C>> extends LogicalExpressi
    */
   private static <C extends Comparable<C>> LogicalExpression createIsNotNullPredicate(LogicalExpression expr) {
     return new ParquetIsPredicate<C>(expr,
-        //if there are all nulls  -> canDrop
-        (exprStat, evaluator) -> isAllNulls(exprStat, evaluator.getRowCount())
+      (exprStat, evaluator) -> isAllNulls(exprStat, evaluator.getRowCount()) ? RowsMatch.NONE : checkNull(exprStat)
     );
   }
 
@@ -113,40 +114,72 @@ public class ParquetIsPredicate<C extends Comparable<C>> extends LogicalExpressi
    * IS TRUE predicate.
    */
   private static LogicalExpression createIsTruePredicate(LogicalExpression expr) {
-    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) ->
-        //if max value is not true or if there are all nulls  -> canDrop
-        isAllNulls(exprStat, evaluator.getRowCount()) || exprStat.hasNonNullValue() && !((BooleanStatistics) exprStat).getMax()
-    );
+    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) -> {
+      if (isAllNulls(exprStat, evaluator.getRowCount())) {
+        return RowsMatch.NONE;
+      }
+      if (!exprStat.hasNonNullValue()) {
+        return RowsMatch.SOME;
+      }
+      if (!((BooleanStatistics) exprStat).getMax()) {
+        return RowsMatch.NONE;
+      }
+      return ((BooleanStatistics) exprStat).getMin() ? checkNull(exprStat) : RowsMatch.SOME;
+    });
   }
 
   /**
    * IS FALSE predicate.
    */
   private static LogicalExpression createIsFalsePredicate(LogicalExpression expr) {
-    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) ->
-        //if min value is not false or if there are all nulls  -> canDrop
-        isAllNulls(exprStat, evaluator.getRowCount()) || exprStat.hasNonNullValue() && ((BooleanStatistics) exprStat).getMin()
-    );
+    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) -> {
+      if (isAllNulls(exprStat, evaluator.getRowCount())) {
+        return RowsMatch.NONE;
+      }
+      if (!exprStat.hasNonNullValue()) {
+        return RowsMatch.SOME;
+      }
+      if (((BooleanStatistics) exprStat).getMin()) {
+        return RowsMatch.NONE;
+      }
+      return ((BooleanStatistics) exprStat).getMax() ? RowsMatch.SOME : checkNull(exprStat);
+    });
   }
 
   /**
    * IS NOT TRUE predicate.
    */
   private static LogicalExpression createIsNotTruePredicate(LogicalExpression expr) {
-    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) ->
-        //if min value is not false or if there are no nulls  -> canDrop
-        hasNoNulls(exprStat) && exprStat.hasNonNullValue() && ((BooleanStatistics) exprStat).getMin()
-    );
+    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) -> {
+      if (isAllNulls(exprStat, evaluator.getRowCount())) {
+        return RowsMatch.ALL;
+      }
+      if (!exprStat.hasNonNullValue()) {
+        return RowsMatch.SOME;
+      }
+      if (((BooleanStatistics) exprStat).getMin()) {
+        return hasNoNulls(exprStat) ? RowsMatch.NONE : RowsMatch.SOME;
+      }
+      return ((BooleanStatistics) exprStat).getMax() ? RowsMatch.SOME : RowsMatch.ALL;
+    });
   }
 
   /**
    * IS NOT FALSE predicate.
    */
   private static LogicalExpression createIsNotFalsePredicate(LogicalExpression expr) {
-    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) ->
-        //if max value is not true or if there are no nulls  -> canDrop
-        hasNoNulls(exprStat) && exprStat.hasNonNullValue() && !((BooleanStatistics) exprStat).getMax()
-    );
+    return new ParquetIsPredicate<Boolean>(expr, (exprStat, evaluator) -> {
+      if (isAllNulls(exprStat, evaluator.getRowCount())) {
+        return RowsMatch.ALL;
+      }
+      if (!exprStat.hasNonNullValue()) {
+        return RowsMatch.SOME;
+      }
+      if (!((BooleanStatistics) exprStat).getMax()) {
+        return hasNoNulls(exprStat) ? RowsMatch.NONE : RowsMatch.SOME;
+      }
+      return ((BooleanStatistics) exprStat).getMin() ? RowsMatch.ALL : RowsMatch.SOME;
+    });
   }
 
   public static <C extends Comparable<C>> LogicalExpression createIsPredicate(String function, LogicalExpression expr) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/RangeExprEvaluator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/RangeExprEvaluator.java
index f127f0b..2b55e3d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/RangeExprEvaluator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/stat/RangeExprEvaluator.java
@@ -258,21 +258,28 @@ public class RangeExprEvaluator<T extends Comparable<T>> extends AbstractExprVis
       final ValueHolder minFuncHolder = InterpreterEvaluator.evaluateFunction(interpreter, args1, holderExpr.getName());
       final ValueHolder maxFuncHolder = InterpreterEvaluator.evaluateFunction(interpreter, args2, holderExpr.getName());
 
+      Statistics<T> statistics;
       switch (destType) {
-      //TODO : need handle # of nulls.
-      case INT:
-        return getStatistics( ((IntHolder)minFuncHolder).value, ((IntHolder)maxFuncHolder).value);
-      case BIGINT:
-        return getStatistics( ((BigIntHolder)minFuncHolder).value, ((BigIntHolder)maxFuncHolder).value);
-      case FLOAT4:
-        return getStatistics( ((Float4Holder)minFuncHolder).value, ((Float4Holder)maxFuncHolder).value);
-      case FLOAT8:
-        return getStatistics( ((Float8Holder)minFuncHolder).value, ((Float8Holder)maxFuncHolder).value);
-      case TIMESTAMP:
-        return getStatistics(((TimeStampHolder) minFuncHolder).value, ((TimeStampHolder) maxFuncHolder).value);
-      default:
-        return null;
+        case INT:
+          statistics = getStatistics(((IntHolder) minFuncHolder).value, ((IntHolder) maxFuncHolder).value);
+          break;
+        case BIGINT:
+          statistics = getStatistics(((BigIntHolder) minFuncHolder).value, ((BigIntHolder) maxFuncHolder).value);
+          break;
+        case FLOAT4:
+          statistics = getStatistics(((Float4Holder) minFuncHolder).value, ((Float4Holder) maxFuncHolder).value);
+          break;
+        case FLOAT8:
+          statistics = getStatistics(((Float8Holder) minFuncHolder).value, ((Float8Holder) maxFuncHolder).value);
+          break;
+        case TIMESTAMP:
+          statistics = getStatistics(((TimeStampHolder) minFuncHolder).value, ((TimeStampHolder) maxFuncHolder).value);
+          break;
+        default:
+          return null;
       }
+      statistics.setNumNulls(input.getNumNulls());
+      return statistics;
     } catch (Exception e) {
       throw new DrillRuntimeException("Error in evaluating function of " + holderExpr.getName() );
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/AbstractParquetGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/AbstractParquetGroupScan.java
index 33472bb..bf292be 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/AbstractParquetGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/AbstractParquetGroupScan.java
@@ -262,12 +262,14 @@ public abstract class AbstractParquetGroupScan extends AbstractFileGroupScan {
         }
       }
 
-      if (ParquetRGFilterEvaluator.canDrop(filterPredicate, columnStatisticsMap, rowGroup.getRowCount())) {
-        continue;
+      ParquetFilterPredicate.RowsMatch match = ParquetRGFilterEvaluator.matches(filterPredicate, columnStatisticsMap, rowGroup.getRowCount(), parquetTableMetadata, rowGroup.getColumns(), schemaPathsInExpr);
+      if (match == ParquetFilterPredicate.RowsMatch.NONE) {
+        continue; // No rows comply with the filter => drop the row group
       }
+      rowGroup.setRowsMatch(match);
 
       qualifiedRGs.add(rowGroup);
-      qualifiedFilePath.add(rowGroup.getPath());  // TODO : optimize when 1 file contains m row groups.
+      qualifiedFilePath.add(rowGroup.getPath());
     }
 
     if (qualifiedRGs.size() == rowGroupInfos.size() ) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
index 83ce4d2..b5f0ca4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
@@ -29,6 +29,8 @@ import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexUtil;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.expression.ValueExpressions;
+import org.apache.drill.exec.expr.stat.ParquetFilterPredicate;
+import org.apache.drill.exec.expr.stat.ParquetFilterPredicate.RowsMatch;
 import org.apache.drill.exec.ops.OptimizerRulesContext;
 import org.apache.drill.exec.physical.base.GroupScan;
 import org.apache.drill.exec.planner.common.DrillRelOptUtil;
@@ -165,12 +167,26 @@ public abstract class ParquetPushDownFilter extends StoragePluginOptimizerRule {
       return;
     }
 
-
     RelNode newScan = ScanPrel.create(scan, scan.getTraitSet(), newGroupScan, scan.getRowType());;
 
     if (project != null) {
       newScan = project.copy(project.getTraitSet(), ImmutableList.of(newScan));
     }
+
+    if (newGroupScan instanceof AbstractParquetGroupScan) {
+      RowsMatch matchAll = RowsMatch.ALL;
+      List<RowGroupInfo> rowGroupInfos = ((AbstractParquetGroupScan) newGroupScan).rowGroupInfos;
+      for (RowGroupInfo rowGroup : rowGroupInfos) {
+        if (rowGroup.getRowsMatch() != RowsMatch.ALL) {
+          matchAll = RowsMatch.SOME;
+          break;
+        }
+      }
+      if (matchAll == ParquetFilterPredicate.RowsMatch.ALL) {
+        call.transformTo(newScan);
+      }
+    }
+
     final RelNode newFilter = filter.copy(filter.getTraitSet(), ImmutableList.<RelNode>of(newScan));
     call.transformTo(newFilter);
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRGFilterEvaluator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRGFilterEvaluator.java
index 370988b..3e7bc65 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRGFilterEvaluator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRGFilterEvaluator.java
@@ -27,6 +27,7 @@ import org.apache.drill.exec.compile.sig.ConstantExpressionIdentifier;
 import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
 import org.apache.drill.exec.expr.fn.FunctionLookupContext;
 import org.apache.drill.exec.expr.stat.ParquetFilterPredicate;
+import org.apache.drill.exec.expr.stat.ParquetFilterPredicate.RowsMatch;
 import org.apache.drill.exec.expr.stat.RangeExprEvaluator;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.UdfUtilities;
@@ -37,19 +38,23 @@ import org.apache.drill.exec.store.parquet.stat.ParquetFooterStatCollector;
 import org.apache.parquet.hadoop.metadata.ParquetMetadata;
 
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import static org.apache.drill.exec.store.parquet.metadata.MetadataBase.ColumnMetadata;
+import static org.apache.drill.exec.store.parquet.metadata.MetadataBase.ParquetTableMetadataBase;
+
 public class ParquetRGFilterEvaluator {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetRGFilterEvaluator.class);
 
-  public static boolean evalFilter(LogicalExpression expr, ParquetMetadata footer, int rowGroupIndex,
+  public static RowsMatch evalFilter(LogicalExpression expr, ParquetMetadata footer, int rowGroupIndex,
       OptionManager options, FragmentContext fragmentContext) {
     final HashMap<String, String> emptyMap = new HashMap<String, String>();
     return evalFilter(expr, footer, rowGroupIndex, options, fragmentContext, emptyMap);
   }
 
-  public static boolean evalFilter(LogicalExpression expr, ParquetMetadata footer, int rowGroupIndex,
+  public static RowsMatch evalFilter(LogicalExpression expr, ParquetMetadata footer, int rowGroupIndex,
       OptionManager options, FragmentContext fragmentContext, Map<String, String> implicitColValues) {
     // figure out the set of columns referenced in expression.
     final Set<SchemaPath> schemaPathsInExpr = expr.accept(new FieldReferenceFinder(), null);
@@ -57,23 +62,19 @@ public class ParquetRGFilterEvaluator {
 
     Map<SchemaPath, ColumnStatistics> columnStatisticsMap = columnStatCollector.collectColStat(schemaPathsInExpr);
 
-    boolean canDrop = canDrop(expr, columnStatisticsMap, footer.getBlocks().get(rowGroupIndex).getRowCount(), fragmentContext, fragmentContext.getFunctionRegistry());
-    return canDrop;
+    return matches(expr, columnStatisticsMap, footer.getBlocks().get(rowGroupIndex).getRowCount(), fragmentContext, fragmentContext.getFunctionRegistry());
   }
 
-
-  public static boolean canDrop(ParquetFilterPredicate parquetPredicate, Map<SchemaPath,
+  public static RowsMatch matches(ParquetFilterPredicate parquetPredicate, Map<SchemaPath,
       ColumnStatistics> columnStatisticsMap, long rowCount) {
-    boolean canDrop = false;
     if (parquetPredicate != null) {
       RangeExprEvaluator rangeExprEvaluator = new RangeExprEvaluator(columnStatisticsMap, rowCount);
-      canDrop = parquetPredicate.canDrop(rangeExprEvaluator);
+      return parquetPredicate.matches(rangeExprEvaluator);
     }
-    return canDrop;
+    return RowsMatch.SOME;
   }
 
-
-  public static boolean canDrop(LogicalExpression expr, Map<SchemaPath, ColumnStatistics> columnStatisticsMap,
+  public static RowsMatch matches(LogicalExpression expr, Map<SchemaPath, ColumnStatistics> columnStatisticsMap,
       long rowCount, UdfUtilities udfUtilities, FunctionLookupContext functionImplementationRegistry) {
     ErrorCollector errorCollector = new ErrorCollectorImpl();
     LogicalExpression materializedFilter = ExpressionTreeMaterializer.materializeFilterExpr(
@@ -82,14 +83,39 @@ public class ParquetRGFilterEvaluator {
     if (errorCollector.hasErrors()) {
       logger.error("{} error(s) encountered when materialize filter expression : {}",
           errorCollector.getErrorCount(), errorCollector.toErrorString());
-      return false;
+      return RowsMatch.SOME;
     }
 
     Set<LogicalExpression> constantBoundaries = ConstantExpressionIdentifier.getConstantExpressionSet(materializedFilter);
     ParquetFilterPredicate parquetPredicate = (ParquetFilterPredicate) ParquetFilterBuilder.buildParquetFilterPredicate(
         materializedFilter, constantBoundaries, udfUtilities);
 
-    return canDrop(parquetPredicate, columnStatisticsMap, rowCount);
+    return matches(parquetPredicate, columnStatisticsMap, rowCount);
+  }
+
+  public static RowsMatch matches(ParquetFilterPredicate parquetPredicate, Map<SchemaPath, ColumnStatistics> columnStatisticsMap, long rowCount, ParquetTableMetadataBase parquetTableMetadata, List<? extends ColumnMetadata> columnMetadataList, Set<SchemaPath> schemaPathsInExpr) {
+    RowsMatch temp = matches(parquetPredicate, columnStatisticsMap, rowCount);
+    return temp == RowsMatch.ALL && isRepeated(schemaPathsInExpr, parquetTableMetadata, columnMetadataList) ? RowsMatch.SOME : temp;
+  }
+
+  /**
+   * Check if one of the fields involved in the filter is an array (used in DRILL_6259_test_data).
+   *
+   * @return true if at least one is an array, false otherwise.
+   */
+  private static boolean isRepeated(Set<SchemaPath> fields, ParquetTableMetadataBase parquetTableMetadata, List<? extends ColumnMetadata> columnMetadataList) {
+    final Map<SchemaPath, ColumnMetadata> columnMetadataMap = new HashMap<>();
+    for (final ColumnMetadata columnMetadata : columnMetadataList) {
+      SchemaPath schemaPath = SchemaPath.getCompoundPath(columnMetadata.getName());
+      columnMetadataMap.put(schemaPath, columnMetadata);
+    }
+    for (final SchemaPath field : fields) {
+      ColumnMetadata columnMetadata = columnMetadataMap.get(field.getUnIndexed());
+      if (columnMetadata != null && parquetTableMetadata.getRepetitionLevel(columnMetadata.getName()) >= 1) {
+        return true;
+      }
+    }
+    return false;
   }
 
   /**
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/RowGroupInfo.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/RowGroupInfo.java
index af436d8..7d2143c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/RowGroupInfo.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/RowGroupInfo.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.store.parquet;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.drill.exec.expr.stat.ParquetFilterPredicate.RowsMatch;
 import org.apache.drill.exec.store.dfs.ReadEntryFromHDFS;
 import org.apache.drill.exec.store.dfs.easy.FileWork;
 import org.apache.drill.exec.store.schedule.CompleteWork;
@@ -35,6 +36,7 @@ public class RowGroupInfo extends ReadEntryFromHDFS implements CompleteWork, Fil
   private List<? extends ColumnMetadata> columns;
   private long rowCount;  // rowCount = -1 indicates to include all rows.
   private long numRecordsToRead;
+  private RowsMatch rowsMatch = RowsMatch.SOME;
 
   @JsonCreator
   public RowGroupInfo(@JsonProperty("path") String path,
@@ -95,4 +97,7 @@ public class RowGroupInfo extends ReadEntryFromHDFS implements CompleteWork, Fil
     this.columns = columns;
   }
 
+  public RowsMatch getRowsMatch() { return rowsMatch; }
+
+  public void setRowsMatch(RowsMatch rowsMatch) { this.rowsMatch = rowsMatch; }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/stat/ParquetFooterStatCollector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/stat/ParquetFooterStatCollector.java
index ac63bda..4e73d6b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/stat/ParquetFooterStatCollector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/stat/ParquetFooterStatCollector.java
@@ -59,7 +59,7 @@ public class ParquetFooterStatCollector implements ColumnStatCollector {
     // Reasons to pass implicit columns and their values:
     // 1. Differentiate implicit columns from regular non-exist columns. Implicit columns do not
     //    exist in parquet metadata. Without such knowledge, implicit columns is treated as non-exist
-    //    column.  A condition on non-exist column would lead to canDrop = true, which is not the
+    //    column.  A condition on non-exist column would lead to matches = ALL, which is not the
     //    right behavior for condition on implicit columns.
 
     // 2. Pass in the implicit column name with corresponding values, and wrap them in Statistics with
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/stat/ParquetMetaStatCollector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/stat/ParquetMetaStatCollector.java
index 437074e..a46191b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/stat/ParquetMetaStatCollector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/stat/ParquetMetaStatCollector.java
@@ -59,7 +59,7 @@ public class ParquetMetaStatCollector implements  ColumnStatCollector {
     // Reasons to pass implicit columns and their values:
     // 1. Differentiate implicit columns from regular non-exist columns. Implicit columns do not
     //    exist in parquet metadata. Without such knowledge, implicit columns is treated as non-exist
-    //    column.  A condition on non-exist column would lead to canDrop = true, which is not the
+    //    column.  A condition on non-exist column would lead to matches = ALL, which is not the
     //    right behavior for condition on implicit columns.
 
     // 2. Pass in the implicit column name with corresponding values, and wrap them in Statistics with
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java
index c871ccc..ea12f40 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetFilterPushDown.java
@@ -20,12 +20,17 @@ package org.apache.drill.exec.store.parquet;
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.exec.expr.fn.FunctionGenerationHelper;
+import org.apache.drill.exec.expr.stat.ParquetFilterPredicate.RowsMatch;
+import org.apache.drill.exec.expr.stat.ParquetIsPredicate;
+import org.apache.drill.exec.expr.stat.RangeExprEvaluator;
 import org.apache.drill.exec.ops.FragmentContextImpl;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.proto.BitControl;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.parquet.column.statistics.BooleanStatistics;
 import org.apache.parquet.format.converter.ParquetMetadataConverter;
 import org.apache.parquet.hadoop.ParquetFileReader;
 import org.apache.parquet.hadoop.metadata.ParquetMetadata;
@@ -36,6 +41,8 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestWatcher;
 import org.junit.runner.Description;
+import org.mockito.ArgumentMatchers;
+import org.mockito.Mockito;
 
 import java.io.File;
 import java.io.IOException;
@@ -57,6 +64,8 @@ public class TestParquetFilterPushDown extends PlanTestBase {
 
     dirTestWatcher.copyResourceToRoot(Paths.get("parquetFilterPush"));
     dirTestWatcher.copyResourceToRoot(Paths.get("parquet", "multirowgroup.parquet"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("parquet", "multirowgroup2.parquet"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("parquet", "multirowgroupwithNulls.parquet"));
   }
 
   @AfterClass
@@ -97,73 +106,75 @@ public class TestParquetFilterPushDown extends PlanTestBase {
       .toFile();
     ParquetMetadata footer = getParquetMetaData(file);
 
-    testParquetRowGroupFilterEval(footer, "intCol = 100", false);
-    testParquetRowGroupFilterEval(footer, "intCol = 0", false);
-    testParquetRowGroupFilterEval(footer, "intCol = 50", false);
+    testParquetRowGroupFilterEval(footer, "intCol = 100", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol = 0", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol = 50", RowsMatch.SOME);
 
-    testParquetRowGroupFilterEval(footer, "intCol = -1", true);
-    testParquetRowGroupFilterEval(footer, "intCol = 101", true);
+    testParquetRowGroupFilterEval(footer, "intCol = -1", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol = 101", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "intCol > 100", true);
-    testParquetRowGroupFilterEval(footer, "intCol > 99", false);
+    testParquetRowGroupFilterEval(footer, "intCol > 100", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol > 99", RowsMatch.SOME);
 
-    testParquetRowGroupFilterEval(footer, "intCol >= 100", false);
-    testParquetRowGroupFilterEval(footer, "intCol >= 101", true);
+    testParquetRowGroupFilterEval(footer, "intCol >= 100", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol >= 101", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "intCol < 100", false);
-    testParquetRowGroupFilterEval(footer, "intCol < 1", false);
-    testParquetRowGroupFilterEval(footer, "intCol < 0", true);
+    testParquetRowGroupFilterEval(footer, "intCol < 100", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol < 1", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol < 0", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "intCol <= 100", false);
-    testParquetRowGroupFilterEval(footer, "intCol <= 1", false);
-    testParquetRowGroupFilterEval(footer, "intCol <= 0", false);
-    testParquetRowGroupFilterEval(footer, "intCol <= -1", true);
+    testParquetRowGroupFilterEval(footer, "intCol <= 100", RowsMatch.ALL);
+    testParquetRowGroupFilterEval(footer, "intCol <= 1", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol <= 0", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol <= -1", RowsMatch.NONE);
 
     // "and"
-    testParquetRowGroupFilterEval(footer, "intCol > 100 and intCol  < 200", true);
-    testParquetRowGroupFilterEval(footer, "intCol > 50 and intCol < 200", false);
-    testParquetRowGroupFilterEval(footer, "intCol > 50 and intCol > 200", true); // essentially, intCol > 200
+    testParquetRowGroupFilterEval(footer, "intCol > 100 and intCol < 200", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol > 50 and intCol < 200", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol > 50 and intCol > 200", RowsMatch.NONE); // essentially, intCol > 200
 
     // "or"
-    testParquetRowGroupFilterEval(footer, "intCol = 150 or intCol = 160", true);
-    testParquetRowGroupFilterEval(footer, "intCol = 50 or intCol = 160", false);
+    testParquetRowGroupFilterEval(footer, "intCol = 150 or intCol = 160", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol = 50 or intCol = 160", RowsMatch.SOME);
 
     //"nonExistCol" does not exist in the table. "AND" with a filter on exist column
-    testParquetRowGroupFilterEval(footer, "intCol > 100 and nonExistCol = 100", true);
-    testParquetRowGroupFilterEval(footer, "intCol > 50 and nonExistCol = 100", true); // since nonExistCol = 100 -> Unknown -> could drop.
-    testParquetRowGroupFilterEval(footer, "nonExistCol = 100 and intCol > 50", true); // since nonExistCol = 100 -> Unknown -> could drop.
-    testParquetRowGroupFilterEval(footer, "intCol > 100 and nonExistCol < 'abc'", true);
-    testParquetRowGroupFilterEval(footer, "nonExistCol < 'abc' and intCol > 100", true); // nonExistCol < 'abc' hit NumberException and is ignored, but intCol >100 will say "drop".
-    testParquetRowGroupFilterEval(footer, "intCol > 50 and nonExistCol < 'abc'", false); // because nonExistCol < 'abc' hit NumberException and is ignored.
+    testParquetRowGroupFilterEval(footer, "intCol > 100 and nonExistCol = 100", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol > 50 and nonExistCol = 100", RowsMatch.NONE); // since nonExistCol = 100 -> Unknown -> could drop.
+    testParquetRowGroupFilterEval(footer, "nonExistCol = 100 and intCol > 50", RowsMatch.NONE); // since nonExistCol = 100 -> Unknown -> could drop.
+    testParquetRowGroupFilterEval(footer, "intCol > 100 and nonExistCol < 'abc'", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "nonExistCol < 'abc' and intCol > 100", RowsMatch.NONE); // nonExistCol < 'abc' hit NumberException and is ignored, but intCol >100 will
+    // say "drop".
+    testParquetRowGroupFilterEval(footer, "intCol > 50 and nonExistCol < 'abc'", RowsMatch.SOME); // because nonExistCol < 'abc' hit NumberException and
+    // is ignored.
 
     //"nonExistCol" does not exist in the table. "OR" with a filter on exist column
-    testParquetRowGroupFilterEval(footer, "intCol > 100 or nonExistCol = 100", true); // nonExistCol = 100 -> could drop.
-    testParquetRowGroupFilterEval(footer, "nonExistCol = 100 or intCol > 100", true); // nonExistCol = 100 -> could drop.
-    testParquetRowGroupFilterEval(footer, "intCol > 50 or nonExistCol < 100", false);
-    testParquetRowGroupFilterEval(footer, "nonExistCol < 100 or intCol > 50", false);
+    testParquetRowGroupFilterEval(footer, "intCol > 100 or nonExistCol = 100", RowsMatch.NONE); // nonExistCol = 100 -> could drop.
+    testParquetRowGroupFilterEval(footer, "nonExistCol = 100 or intCol > 100", RowsMatch.NONE); // nonExistCol = 100 -> could drop.
+    testParquetRowGroupFilterEval(footer, "intCol > 50 or nonExistCol < 100", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "nonExistCol < 100 or intCol > 50", RowsMatch.SOME);
 
     // cast function on column side (LHS)
-    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = 100", false);
-    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = 0", false);
-    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = 50", false);
-    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = 101", true);
-    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = -1", true);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = 100", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = 0", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = 50", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = 101", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as bigint) = -1", RowsMatch.NONE);
 
     // cast function on constant side (RHS)
-    testParquetRowGroupFilterEval(footer, "intCol = cast(100 as bigint)", false);
-    testParquetRowGroupFilterEval(footer, "intCol = cast(0 as bigint)", false);
-    testParquetRowGroupFilterEval(footer, "intCol = cast(50 as bigint)", false);
-    testParquetRowGroupFilterEval(footer, "intCol = cast(101 as bigint)", true);
-    testParquetRowGroupFilterEval(footer, "intCol = cast(-1 as bigint)", true);
+    testParquetRowGroupFilterEval(footer, "intCol = cast(100 as bigint)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol = cast(0 as bigint)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol = cast(50 as bigint)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "intCol = cast(101 as bigint)", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol = cast(-1 as bigint)", RowsMatch.NONE);
 
     // cast into float4/float8
-    testParquetRowGroupFilterEval(footer, "cast(intCol as float4) = cast(101.0 as float4)", true);
-    testParquetRowGroupFilterEval(footer, "cast(intCol as float4) = cast(-1.0 as float4)", true);
-    testParquetRowGroupFilterEval(footer, "cast(intCol as float4) = cast(1.0 as float4)", false);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as float4) = cast(101.0 as float4)", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as float4) = cast(-1.0 as float4)", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as float4) = cast(1.0 as float4)", RowsMatch.SOME);
 
-    testParquetRowGroupFilterEval(footer, "cast(intCol as float8) = 101.0", true);
-    testParquetRowGroupFilterEval(footer, "cast(intCol as float8) = -1.0", true);
-    testParquetRowGroupFilterEval(footer, "cast(intCol as float8) = 1.0", false);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as float8) = 101.0", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as float8) = -1.0", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "cast(intCol as float8) = 1.0", RowsMatch.SOME);
   }
 
   @Test
@@ -176,15 +187,15 @@ public class TestParquetFilterPushDown extends PlanTestBase {
       .toFile();
     ParquetMetadata footer = getParquetMetaData(file);
 
-    testParquetRowGroupFilterEval(footer, "intCol = 100", true);
-    testParquetRowGroupFilterEval(footer, "intCol = 0", true);
-    testParquetRowGroupFilterEval(footer, "intCol = -100", true);
+    testParquetRowGroupFilterEval(footer, "intCol = 100", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol = 0", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol = -100", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "intCol > 10", true);
-    testParquetRowGroupFilterEval(footer, "intCol >= 10", true);
+    testParquetRowGroupFilterEval(footer, "intCol > 10", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol >= 10", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "intCol < 10", true);
-    testParquetRowGroupFilterEval(footer, "intCol <= 10", true);
+    testParquetRowGroupFilterEval(footer, "intCol < 10", RowsMatch.NONE);
+    testParquetRowGroupFilterEval(footer, "intCol <= 10", RowsMatch.NONE);
   }
 
   @Test
@@ -216,21 +227,21 @@ public class TestParquetFilterPushDown extends PlanTestBase {
   }
 
   private void testDatePredicateAgainstDrillCTASHelper(ParquetMetadata footer) throws Exception{
-    testParquetRowGroupFilterEval(footer, "o_orderdate = cast('1992-01-01' as date)", false);
-    testParquetRowGroupFilterEval(footer, "o_orderdate = cast('1991-12-31' as date)", true);
+    testParquetRowGroupFilterEval(footer, "o_orderdate = cast('1992-01-01' as date)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_orderdate = cast('1991-12-31' as date)", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "o_orderdate >= cast('1991-12-31' as date)", false);
-    testParquetRowGroupFilterEval(footer, "o_orderdate >= cast('1992-01-03' as date)", false);
-    testParquetRowGroupFilterEval(footer, "o_orderdate >= cast('1992-01-04' as date)", true);
+    testParquetRowGroupFilterEval(footer, "o_orderdate >= cast('1991-12-31' as date)", RowsMatch.ALL);
+    testParquetRowGroupFilterEval(footer, "o_orderdate >= cast('1992-01-03' as date)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_orderdate >= cast('1992-01-04' as date)", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "o_orderdate > cast('1992-01-01' as date)", false);
-    testParquetRowGroupFilterEval(footer, "o_orderdate > cast('1992-01-03' as date)", true);
+    testParquetRowGroupFilterEval(footer, "o_orderdate > cast('1992-01-01' as date)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_orderdate > cast('1992-01-03' as date)", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "o_orderdate <= cast('1992-01-01' as date)", false);
-    testParquetRowGroupFilterEval(footer, "o_orderdate <= cast('1991-12-31' as date)", true);
+    testParquetRowGroupFilterEval(footer, "o_orderdate <= cast('1992-01-01' as date)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_orderdate <= cast('1991-12-31' as date)", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "o_orderdate < cast('1992-01-02' as date)", false);
-    testParquetRowGroupFilterEval(footer, "o_orderdate < cast('1992-01-01' as date)", true);
+    testParquetRowGroupFilterEval(footer, "o_orderdate < cast('1992-01-02' as date)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_orderdate < cast('1992-01-01' as date)", RowsMatch.NONE);
   }
 
   @Test
@@ -243,25 +254,99 @@ public class TestParquetFilterPushDown extends PlanTestBase {
       .toFile();
     ParquetMetadata footer = getParquetMetaData(file);
 
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp = cast('1992-01-01 10:20:30' as timestamp)", false);
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp = cast('1992-01-01 10:20:29' as timestamp)", true);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp = cast('1992-01-01 10:20:30' as timestamp)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp = cast('1992-01-01 10:20:29' as timestamp)", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp >= cast('1992-01-01 10:20:29' as timestamp)", false);
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp >= cast('1992-01-03 10:20:30' as timestamp)", false);
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp >= cast('1992-01-03 10:20:31' as timestamp)", true);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp >= cast('1992-01-01 10:20:29' as timestamp)", RowsMatch.ALL);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp >= cast('1992-01-03 10:20:30' as timestamp)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp >= cast('1992-01-03 10:20:31' as timestamp)", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp > cast('1992-01-03 10:20:29' as timestamp)", false);
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp > cast('1992-01-03 10:20:30' as timestamp)", true);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp > cast('1992-01-03 10:20:29' as timestamp)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp > cast('1992-01-03 10:20:30' as timestamp)", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp <= cast('1992-01-01 10:20:30' as timestamp)", false);
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp <= cast('1992-01-01 10:20:29' as timestamp)", true);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp <= cast('1992-01-01 10:20:30' as timestamp)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp <= cast('1992-01-01 10:20:29' as timestamp)", RowsMatch.NONE);
 
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp < cast('1992-01-01 10:20:31' as timestamp)", false);
-    testParquetRowGroupFilterEval(footer, "o_ordertimestamp < cast('1992-01-01 10:20:30' as timestamp)", true);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp < cast('1992-01-01 10:20:31' as timestamp)", RowsMatch.SOME);
+    testParquetRowGroupFilterEval(footer, "o_ordertimestamp < cast('1992-01-01 10:20:30' as timestamp)", RowsMatch.NONE);
 
   }
 
   @Test
+  public void testFilterPruning() throws Exception {
+    // multirowgroup2 is a parquet file with 3 rowgroups inside. One with a=0, another with a=1 and a=2, and the last with a=3 and a=4;
+    // FilterPushDown should be able to prune the filter from the scan operator according to the rowgroup statistics.
+    final String sql = "select * from dfs.`parquet/multirowgroup2.parquet` where ";
+    PlanTestBase.testPlanMatchingPatterns(sql + "a > 1", new String[]{"numRowGroups=2"}); //No filter pruning
+    PlanTestBase.testPlanMatchingPatterns(sql + "a > 2", new String[]{"numRowGroups=1"}, new String[]{"Filter\\("}); // Filter pruning
+
+    PlanTestBase.testPlanMatchingPatterns(sql + "a < 2", new String[]{"numRowGroups=2"}); // No filter pruning
+    PlanTestBase.testPlanMatchingPatterns(sql + "a < 1", new String[]{"numRowGroups=1"}, new String[]{"Filter\\("}); // Filter pruning
+
+    PlanTestBase.testPlanMatchingPatterns(sql + "a >= 2", new String[]{"numRowGroups=2"}); // No filter pruning
+    PlanTestBase.testPlanMatchingPatterns(sql + "a >= 1", new String[]{"numRowGroups=2"}, new String[]{"Filter\\("}); // Filter pruning
+
+    PlanTestBase.testPlanMatchingPatterns(sql + "a <= 1", new String[]{"numRowGroups=2"}); // No filter pruning
+    PlanTestBase.testPlanMatchingPatterns(sql + "a <= 2", new String[]{"numRowGroups=2"}, new String[]{"Filter\\("}); // Filter pruning
+
+    PlanTestBase.testPlanMatchingPatterns(sql + "a > 0 and a < 2", new String[]{"numRowGroups=1"}); // No filter pruning
+    PlanTestBase.testPlanMatchingPatterns(sql + "a > 0 and a < 3", new String[]{"numRowGroups=1"}, new String[]{"Filter\\("}); //Filter pruning
+
+    PlanTestBase.testPlanMatchingPatterns(sql + "a < 1 or a > 1", new String[]{"numRowGroups=3"}); // No filter pruning
+    PlanTestBase.testPlanMatchingPatterns(sql + "a < 1 or a > 2", new String[]{"numRowGroups=2"}, new String[]{"Filter\\("}); //Filter pruning
+  }
+
+  @Test
+  public void testFilterPruningWithNulls() throws Exception {
+    // multirowgroupwithNulls is a parquet file with 4 rowgroups inside and some groups contain null values.
+    // RG1 : [min: 20, max: 29, num_nulls: 0]
+    // RG2 : [min: 31, max: 39, num_nulls: 1]
+    // RG3 : [min: 40, max: 49, num_nulls: 1]
+    // RG4 : [min: 50, max: 59, num_nulls: 0]
+    final String sql = "select a from dfs.`parquet/multirowgroupwithNulls.parquet` where ";
+    // "<" "and" ">" with filter
+    testParquetFilterPruning(sql + "30 < a and 40 > a", 9, 1, null);
+    testParquetFilterPruning(sql + "30 < a and a < 40", 9, 1, null);
+    testParquetFilterPruning(sql + "a > 30 and 40 > a", 9, 1, null);
+    testParquetFilterPruning(sql + "a > 30 and a < 40", 9, 1, null);
+    // "<" "and" ">" with no filter
+    testParquetFilterPruning(sql + "19 < a and 30 > a", 10, 1, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "19 < a and a < 30", 10, 1, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "a > 19 and 30 > a", 10, 1, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "a > 19 and a < 30", 10, 1, new String[]{"Filter\\("});
+    // "<=" "and" ">=" with filter
+    testParquetFilterPruning(sql + "a >= 30 and 39 >= a", 9, 1, null);
+    testParquetFilterPruning(sql + "a >= 30 and a <= 39", 9, 1, null);
+    testParquetFilterPruning(sql + "30 <= a and 39 >= a", 9, 1, null);
+    testParquetFilterPruning(sql + "30 <= a and a <= 39", 9, 1, null);
+    // "<=" "and" ">=" with no filter
+    testParquetFilterPruning(sql + "a >= 20 and a <= 29", 10, 1, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "a >= 20 and 29 >= a", 10, 1, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "20 <= a and a <= 29", 10, 1, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "20 <= a and 29 >= a", 10, 1, new String[]{"Filter\\("});
+    // "<" "or" ">" with filter
+    testParquetFilterPruning(sql + "a < 40 or a > 49", 29, 3, null);
+    testParquetFilterPruning(sql + "a < 40 or 49 < a", 29, 3, null);
+    testParquetFilterPruning(sql + "40 > a or a > 49", 29, 3, null);
+    testParquetFilterPruning(sql + "40 > a or 49 < a", 29, 3, null);
+    // "<" "or" ">" with no filter
+    testParquetFilterPruning(sql + "a < 30 or a > 49", 20, 2, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "a < 30 or 49 < a", 20, 2, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "30 > a or a > 49", 20, 2, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "30 > a or 49 < a", 20, 2, new String[]{"Filter\\("});
+    // "<=" "or" ">=" with filter
+    testParquetFilterPruning(sql + "a <= 39 or a >= 50", 29, 3, null);
+    testParquetFilterPruning(sql + "a <= 39 or 50 <= a", 29, 3, null);
+    testParquetFilterPruning(sql + "39 >= a or a >= 50", 29, 3, null);
+    testParquetFilterPruning(sql + "39 >= a or 50 <= a", 29, 3, null);
+    // "<=" "or" ">=" with no filter
+    testParquetFilterPruning(sql + "a <= 29 or a >= 50", 20, 2, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "a <= 29 or 50 <= a", 20, 2, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "29 >= a or a >= 50", 20, 2, new String[]{"Filter\\("});
+    testParquetFilterPruning(sql + "29 >= a or 50 <= a", 20, 2, new String[]{"Filter\\("});
+  }
+
+  @Test
   // Test against parquet files from Drill CTAS post 1.8.0 release.
   public void testDatePredicateAgaistDrillCTASPost1_8() throws  Exception {
     test("use dfs.tmp");
@@ -428,6 +513,15 @@ public class TestParquetFilterPushDown extends PlanTestBase {
 
     final String queryEqualTrueWithAnd = "select col_bln from dfs.`parquetFilterPush/blnTbl` where col_bln = true and unk_col = 'a'";
     testParquetFilterPD(queryEqualTrueWithAnd, 0, 2, false);
+
+    // File ff1.parquet has a column with the values: false, null, false.
+    // File tt1.parquet has a column with the values: true, null, true.
+    // File ft0.parquet has a column with the values: false, true.
+    final String query = "select a from dfs.`parquetFilterPush/tfTbl` where ";
+    testParquetFilterPD(query + "a is true", 3, 2, false);
+    testParquetFilterPD(query + "a is false", 3, 2, false);
+    testParquetFilterPD(query + "a is not true", 5, 1, false);
+    testParquetFilterPD(query + "a is not false", 5, 1, false);
   }
 
   @Test // DRILL-5359
@@ -478,18 +572,89 @@ public class TestParquetFilterPushDown extends PlanTestBase {
     String[] expectedPlan = {"numRowGroups=2"};
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan);
 
-    testBuilder()
-      .sqlQuery(query)
-      .unOrdered()
-      .baselineColumns("cnt")
-      .baselineValues(2L)
-      .go();
+    testBuilder().sqlQuery(query).unOrdered().baselineColumns("cnt").baselineValues(2L).go();
+  }
+
+  @Test // testing min=false, max=true, min/max set, no nulls
+  public void testMinFalseMaxTrue() throws Exception {
+    LogicalExpression le = Mockito.mock(LogicalExpression.class);
+    BooleanStatistics booleanStatistics = Mockito.mock(BooleanStatistics.class);
+    Mockito.doReturn(booleanStatistics).when(le).accept(ArgumentMatchers.any(), ArgumentMatchers.any());
+    RangeExprEvaluator<Boolean> re = Mockito.mock(RangeExprEvaluator.class);
+    Mockito.when(re.getRowCount()).thenReturn(Long.valueOf(2)); // 2 rows
+    Mockito.when(booleanStatistics.isEmpty()).thenReturn(false); // stat is not empty
+    Mockito.when(booleanStatistics.isNumNullsSet()).thenReturn(true); // num_nulls set
+    Mockito. when(booleanStatistics.getNumNulls()).thenReturn(Long.valueOf(0)); // no nulls
+    Mockito. when(booleanStatistics.hasNonNullValue()).thenReturn(true); // min/max set
+    Mockito.when(booleanStatistics.getMin()).thenReturn(false); // min false
+    Mockito.when(booleanStatistics.getMax()).thenReturn(true); // max true
+    ParquetIsPredicate isTrue = (ParquetIsPredicate) ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_TRUE, le);
+    assertEquals(RowsMatch.SOME, isTrue.matches(re));
+    ParquetIsPredicate isFalse = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_FALSE, le);
+    assertEquals(RowsMatch.SOME, isFalse.matches(re));
+    ParquetIsPredicate isNotTrue = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_NOT_TRUE, le);
+    assertEquals(RowsMatch.SOME, isNotTrue.matches(re));
+    ParquetIsPredicate isNotFalse = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_NOT_FALSE, le);
+    assertEquals(RowsMatch.SOME, isNotFalse.matches(re));
+  }
+
+  @Test // testing min=false, max=false, min/max set, no nulls
+  public void testMinFalseMaxFalse() throws Exception {
+    LogicalExpression le = Mockito.mock(LogicalExpression.class);
+    BooleanStatistics booleanStatistics = Mockito.mock(BooleanStatistics.class);
+    Mockito.doReturn(booleanStatistics).when(le).accept(ArgumentMatchers.any(), ArgumentMatchers.any());
+    RangeExprEvaluator<Boolean> re = Mockito.mock(RangeExprEvaluator.class);
+    Mockito.when(re.getRowCount()).thenReturn(Long.valueOf(2)); // 2 rows
+    Mockito.when(booleanStatistics.isEmpty()).thenReturn(false); // stat is not empty
+    Mockito.when(booleanStatistics.isNumNullsSet()).thenReturn(true); // num_nulls set
+    Mockito. when(booleanStatistics.getNumNulls()).thenReturn(Long.valueOf(0)); // no nulls
+    Mockito. when(booleanStatistics.hasNonNullValue()).thenReturn(true); // min/max set
+    Mockito.when(booleanStatistics.getMin()).thenReturn(false); // min false
+    Mockito.when(booleanStatistics.getMax()).thenReturn(false); // max false
+    ParquetIsPredicate isTrue = (ParquetIsPredicate) ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_TRUE, le);
+    assertEquals(RowsMatch.NONE, isTrue.matches(re));
+    ParquetIsPredicate isFalse = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_FALSE, le);
+    assertEquals(RowsMatch.ALL, isFalse.matches(re));
+    ParquetIsPredicate isNotTrue = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_NOT_TRUE, le);
+    assertEquals(RowsMatch.ALL, isNotTrue.matches(re));
+    ParquetIsPredicate isNotFalse = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_NOT_FALSE, le);
+    assertEquals(RowsMatch.NONE, isNotFalse.matches(re));
+  }
+
+  @Test // testing min=true, max=true, min/max set, no nulls
+  public void testMinTrueMaxTrue() throws Exception {
+    LogicalExpression le = Mockito.mock(LogicalExpression.class);
+    BooleanStatistics booleanStatistics = Mockito.mock(BooleanStatistics.class);
+    Mockito.doReturn(booleanStatistics).when(le).accept(ArgumentMatchers.any(), ArgumentMatchers.any());
+    RangeExprEvaluator<Boolean> re = Mockito.mock(RangeExprEvaluator.class);
+    Mockito.when(re.getRowCount()).thenReturn(Long.valueOf(2)); // 2 rows
+    Mockito.when(booleanStatistics.isEmpty()).thenReturn(false); // stat is not empty
+    Mockito.when(booleanStatistics.isNumNullsSet()).thenReturn(true); // num_nulls set
+    Mockito. when(booleanStatistics.getNumNulls()).thenReturn(Long.valueOf(0)); // no nulls
+    Mockito. when(booleanStatistics.hasNonNullValue()).thenReturn(true); // min/max set
+    Mockito.when(booleanStatistics.getMin()).thenReturn(true); // min true
+    Mockito.when(booleanStatistics.getMax()).thenReturn(true); // max true
+    ParquetIsPredicate isTrue = (ParquetIsPredicate) ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_TRUE, le);
+    assertEquals(RowsMatch.ALL, isTrue.matches(re));
+    ParquetIsPredicate isFalse = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_FALSE, le);
+    assertEquals(RowsMatch.NONE, isFalse.matches(re));
+    ParquetIsPredicate isNotTrue = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_NOT_TRUE, le);
+    assertEquals(RowsMatch.NONE, isNotTrue.matches(re));
+    ParquetIsPredicate isNotFalse = (ParquetIsPredicate)  ParquetIsPredicate.createIsPredicate(FunctionGenerationHelper.IS_NOT_FALSE, le);
+    assertEquals(RowsMatch.ALL, isNotFalse.matches(re));
   }
 
   //////////////////////////////////////////////////////////////////////////////////////////////////
   // Some test helper functions.
   //////////////////////////////////////////////////////////////////////////////////////////////////
 
+  private void testParquetFilterPruning(final String query, int expectedRowCount, int expectedRowgroups, String[] excludedPattern) throws Exception{
+    int actualRowCount = testSql(query);
+    assertEquals(expectedRowCount, actualRowCount);
+    String numRowGroupPattern = "numRowGroups=" + expectedRowgroups;
+    testPlanMatchingPatterns(query, new String[]{numRowGroupPattern}, excludedPattern);
+  }
+
   private void testParquetFilterPD(final String query, int expectedRowCount, int expectedNumFiles, boolean usedMetadataFile) throws Exception{
     int actualRowCount = testSql(query);
     assertEquals(expectedRowCount, actualRowCount);
@@ -499,13 +664,13 @@ public class TestParquetFilterPushDown extends PlanTestBase {
     testPlanMatchingPatterns(query, new String[]{numFilesPattern, usedMetaPattern});
   }
 
-  private void testParquetRowGroupFilterEval(final ParquetMetadata footer, final String exprStr, boolean canDropExpected) throws Exception{
+  private void testParquetRowGroupFilterEval(final ParquetMetadata footer, final String exprStr, RowsMatch canDropExpected) throws Exception{
     final LogicalExpression filterExpr = parseExpr(exprStr);
     testParquetRowGroupFilterEval(footer, 0, filterExpr, canDropExpected);
   }
 
-  private void testParquetRowGroupFilterEval(final ParquetMetadata footer, final int rowGroupIndex, final LogicalExpression filterExpr, boolean canDropExpected) {
-    boolean canDrop = ParquetRGFilterEvaluator.evalFilter(filterExpr, footer, rowGroupIndex, fragContext.getOptions(), fragContext);
+  private void testParquetRowGroupFilterEval(final ParquetMetadata footer, final int rowGroupIndex, final LogicalExpression filterExpr, RowsMatch canDropExpected) {
+    RowsMatch canDrop = ParquetRGFilterEvaluator.evalFilter(filterExpr, footer, rowGroupIndex, fragContext.getOptions(), fragContext);
     Assert.assertEquals(canDropExpected, canDrop);
   }
 
diff --git a/exec/java-exec/src/test/resources/parquet/multirowgroup2.parquet b/exec/java-exec/src/test/resources/parquet/multirowgroup2.parquet
new file mode 100644
index 0000000..5139802
Binary files /dev/null and b/exec/java-exec/src/test/resources/parquet/multirowgroup2.parquet differ
diff --git a/exec/java-exec/src/test/resources/parquet/multirowgroupwithNulls.parquet b/exec/java-exec/src/test/resources/parquet/multirowgroupwithNulls.parquet
new file mode 100644
index 0000000..084b315
Binary files /dev/null and b/exec/java-exec/src/test/resources/parquet/multirowgroupwithNulls.parquet differ
diff --git a/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/ff1.parquet b/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/ff1.parquet
new file mode 100644
index 0000000..79c2362
Binary files /dev/null and b/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/ff1.parquet differ
diff --git a/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/ft0.parquet b/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/ft0.parquet
new file mode 100644
index 0000000..c0c51c4
Binary files /dev/null and b/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/ft0.parquet differ
diff --git a/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/tt1.parquet b/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/tt1.parquet
new file mode 100644
index 0000000..35ca274
Binary files /dev/null and b/exec/java-exec/src/test/resources/parquetFilterPush/tfTbl/tt1.parquet differ