You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@drill.apache.org by pa...@apache.org on 2018/06/02 04:50:08 UTC

[drill] branch master updated (f68d3e9 -> 9908ea0)

This is an automated email from the ASF dual-hosted git repository.

parthc pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git.


    from f68d3e9  DRILL-6356: batch sizing for union all
     new eded745  DRILL-4091: Adding support for additional gis operations in gis contrib module
     new 04a532d  DRILL-4364: Image Metadata Format Plugin - Initial commit of Image Metadata Format Plugin - See https://issues.apache.org/jira/browse/DRILL-4364
     new 5ef220e  DRILL-6343: bit vector copyFromSafe is not doing realloc
     new 480ade9  DRILL-6236: Batch sizing for hash join
     new f48894c  DRILL-6445: Fix existing test cases in TestScripts.java and add new test case for DRILLBIT_CONTEXT variable
     new 494d828  DRILL-6450: Visualized plans for profiles querying JDBC sources is broken
     new fde66d7  DRILL-6456: Planner shouldn't create any exchanges on the right side of Lateral Join.
     new 0d5eda8  DRILL-4020: The not-equal operator returns incorrect results when used on the HBase row key
     new 7f30b04  DRILL-5924: native-client: Support user-specified CXX_FLAGS
     new 9908ea0  DRILL-5584: Add branding and versioning information for windows C++ Client.

The 10 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 contrib/gis/pom.xml                                |   6 +-
 contrib/gis/sample-data/CA-cities-with-nulls.csv   |   3 +
 contrib/gis/sample-data/polygons.tsv               |   5 +
 .../esri/core/geometry/VertexGeomAccessor.java}    |  21 +-
 .../gis/{STGeomFromTextSrid.java => STBuffer.java} |  33 +-
 .../gis/{STGeomFromText.java => STContains.java}   |  36 +-
 .../gis/{STGeomFromText.java => STCrosses.java}    |  35 +-
 .../{STGeomFromTextSrid.java => STDifference.java} |  36 +-
 .../gis/{STGeomFromText.java => STDisjoint.java}   |  35 +-
 .../gis/{STGeomFromText.java => STDistance.java}   |  36 +-
 .../{STGeomFromTextSrid.java => STEnvelope.java}   |  39 +-
 .../gis/{STGeomFromText.java => STEquals.java}     |  35 +-
 .../exec/expr/fn/impl/gis/STGeomFromText.java      |   4 +-
 .../exec/expr/fn/impl/gis/STGeomFromTextSrid.java  |   2 +-
 .../gis/{STGeomFromText.java => STIntersects.java} |  36 +-
 .../gis/{STGeomFromText.java => STOverlaps.java}   |  35 +-
 .../drill/exec/expr/fn/impl/gis/STRelate.java      |  73 +++
 .../gis/{STGeomFromText.java => STTouches.java}    |  35 +-
 .../drill/exec/expr/fn/impl/gis/STTransform.java   | 114 +++++
 .../gis/{STGeomFromTextSrid.java => STUnion.java}  |  35 +-
 .../exec/expr/fn/impl/gis/STUnionAggregate.java    | 115 +++++
 .../impl/gis/{STGeomFromText.java => STXFunc.java} |  30 +-
 .../impl/gis/{STGeomFromText.java => STXMax.java}  |  39 +-
 .../impl/gis/{STGeomFromText.java => STXMin.java}  |  39 +-
 .../impl/gis/{STGeomFromText.java => STYFunc.java} |  30 +-
 .../impl/gis/{STGeomFromText.java => STYMax.java}  |  39 +-
 .../impl/gis/{STGeomFromText.java => STYMin.java}  |  39 +-
 .../expr/fn/impl/gis/TestGeometryFunctions.java    | 228 +++++++++-
 contrib/native/client/CMakeLists.txt               |   9 +-
 contrib/native/client/src/clientlib/CMakeLists.txt |  12 +-
 contrib/native/client/src/clientlib/env.h.in       |   7 +-
 contrib/native/client/src/clientlib/version.rc.in  |  68 +++
 .../client/src/clientlib/y2038/CMakeLists.txt      |   8 -
 contrib/native/client/src/protobuf/CMakeLists.txt  |   4 -
 .../drill/exec/store/hbase/HBaseFilterBuilder.java |   1 +
 .../drill/hbase/TestHBaseFilterPushDown.java       |  18 +
 .../org/apache/drill/exec/store/jdbc/JdbcPrel.java |  15 +-
 .../org/apache/drill/yarn/scripts/ScriptUtils.java |  98 +++-
 .../org/apache/drill/yarn/scripts/TestScripts.java | 161 +++++--
 exec/java-exec/pom.xml                             |   5 +
 .../exec/physical/impl/join/HashJoinBatch.java     | 125 ++++--
 .../join/HashJoinMechanicalMemoryCalculator.java   |   1 +
 .../impl/join/HashJoinMemoryCalculator.java        |   1 +
 .../impl/join/HashJoinMemoryCalculatorImpl.java    |  35 +-
 .../exec/physical/impl/join/HashJoinProbe.java     |   2 +
 .../physical/impl/join/HashJoinProbeTemplate.java  |  22 +-
 .../visitor/ExcessiveExchangeIdentifier.java       |  72 ++-
 .../exec/record/AbstractBinaryRecordBatch.java     |   4 +
 .../drill/exec/record/JoinBatchMemoryManager.java  |  61 ++-
 .../exec/record/RecordBatchMemoryManager.java      |  26 +-
 .../apache/drill/exec/record/RecordBatchSizer.java |  49 +-
 .../store/image/GenericMetadataDescriptor.java     |  89 ++++
 .../exec/store/image/GenericMetadataDirectory.java | 315 +++++++++++++
 .../exec/store/image/GenericMetadataReader.java    | 412 +++++++++++++++++
 .../drill/exec/store/image/ImageFormatConfig.java  |  97 ++++
 .../drill/exec/store/image/ImageFormatPlugin.java  |  82 ++++
 .../drill/exec/store/image/ImageRecordReader.java  | 493 +++++++++++++++++++++
 .../main/resources/bootstrap-storage-plugins.json  |  18 +
 .../impl/join/TestBuildSidePartitioningImpl.java   |  20 +-
 .../impl/lateraljoin/TestLateralPlans.java         | 164 ++++++-
 .../exec/physical/unit/TestOutputBatchSize.java    | 386 ++++++++++++++++
 .../store/dfs/TestFormatPluginOptionExtractor.java |   7 +
 .../exec/store/image/TestImageRecordReader.java    | 128 ++++++
 .../src/test/resources/store/image/1_webp_a.webp   | Bin 0 -> 23404 bytes
 .../src/test/resources/store/image/adobeJpeg1.eps  | Bin 0 -> 99569 bytes
 .../src/test/resources/store/image/avi.json        |  32 ++
 .../src/test/resources/store/image/bmp.json        |  36 ++
 .../src/test/resources/store/image/eps.json        | 116 +++++
 .../src/test/resources/store/image/gif.json        |  47 ++
 .../src/test/resources/store/image/ico.json        |  33 ++
 .../src/test/resources/store/image/jpeg.json       | 213 +++++++++
 .../src/test/resources/store/image/mov.json        |  67 +++
 .../src/test/resources/store/image/mp4.json        |  56 +++
 .../src/test/resources/store/image/pcx.json        |  37 ++
 .../src/test/resources/store/image/png.json        |  57 +++
 .../src/test/resources/store/image/psd.json        | 119 +++++
 .../store/image/rose-128x174-24bit-lzw.tiff        | Bin 0 -> 50476 bytes
 .../resources/store/image/rose-128x174-24bit.bmp   | Bin 0 -> 66872 bytes
 .../resources/store/image/rose-128x174-24bit.pcx   | Bin 0 -> 34864 bytes
 .../store/image/rose-128x174-32bit-alpha.png       | Bin 0 -> 26308 bytes
 .../store/image/rose-128x174-32bit-alpha.psd       | Bin 0 -> 102618 bytes
 .../store/image/rose-128x174-8bit-alpha.gif        | Bin 0 -> 10463 bytes
 .../store/image/rose-32x32-32bit-alpha.ico         | Bin 0 -> 4286 bytes
 .../src/test/resources/store/image/sample.avi      | Bin 0 -> 375688 bytes
 .../src/test/resources/store/image/sample.mov      | Bin 0 -> 469690 bytes
 .../src/test/resources/store/image/sample.mp4      | Bin 0 -> 383631 bytes
 .../src/test/resources/store/image/sample.wav      | Bin 0 -> 37534 bytes
 .../src/test/resources/store/image/tiff.json       |  87 ++++
 .../src/test/resources/store/image/wav.json        |  32 ++
 .../src/test/resources/store/image/webp.json       |  29 ++
 .../test/resources/store/image/withExifAndIptc.jpg | Bin 0 -> 44606 bytes
 exec/jdbc-all/pom.xml                              |   4 +
 .../org/apache/drill/exec/vector/BitVector.java    |   8 +-
 93 files changed, 4657 insertions(+), 544 deletions(-)
 create mode 100644 contrib/gis/sample-data/CA-cities-with-nulls.csv
 create mode 100644 contrib/gis/sample-data/polygons.tsv
 copy contrib/{native/client/src/clientlib/env.h.in => gis/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java} (65%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromTextSrid.java => STBuffer.java} (63%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STContains.java} (59%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STCrosses.java} (61%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromTextSrid.java => STDifference.java} (59%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STDisjoint.java} (61%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STDistance.java} (58%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromTextSrid.java => STEnvelope.java} (60%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STEquals.java} (61%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STIntersects.java} (59%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STOverlaps.java} (60%)
 create mode 100644 contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STRelate.java
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STTouches.java} (61%)
 create mode 100644 contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTransform.java
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromTextSrid.java => STUnion.java} (61%)
 create mode 100644 contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnionAggregate.java
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STXFunc.java} (66%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STXMax.java} (60%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STXMin.java} (60%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STYFunc.java} (66%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STYMax.java} (60%)
 copy contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/{STGeomFromText.java => STYMin.java} (60%)
 create mode 100644 contrib/native/client/src/clientlib/version.rc.in
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataDescriptor.java
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataDirectory.java
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataReader.java
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageFormatConfig.java
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageFormatPlugin.java
 create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageRecordReader.java
 create mode 100644 exec/java-exec/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
 create mode 100644 exec/java-exec/src/test/resources/store/image/1_webp_a.webp
 create mode 100644 exec/java-exec/src/test/resources/store/image/adobeJpeg1.eps
 create mode 100644 exec/java-exec/src/test/resources/store/image/avi.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/bmp.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/eps.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/gif.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/ico.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/jpeg.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/mov.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/mp4.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/pcx.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/png.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/psd.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/rose-128x174-24bit-lzw.tiff
 create mode 100644 exec/java-exec/src/test/resources/store/image/rose-128x174-24bit.bmp
 create mode 100644 exec/java-exec/src/test/resources/store/image/rose-128x174-24bit.pcx
 create mode 100644 exec/java-exec/src/test/resources/store/image/rose-128x174-32bit-alpha.png
 create mode 100644 exec/java-exec/src/test/resources/store/image/rose-128x174-32bit-alpha.psd
 create mode 100644 exec/java-exec/src/test/resources/store/image/rose-128x174-8bit-alpha.gif
 create mode 100644 exec/java-exec/src/test/resources/store/image/rose-32x32-32bit-alpha.ico
 create mode 100644 exec/java-exec/src/test/resources/store/image/sample.avi
 create mode 100644 exec/java-exec/src/test/resources/store/image/sample.mov
 create mode 100644 exec/java-exec/src/test/resources/store/image/sample.mp4
 create mode 100644 exec/java-exec/src/test/resources/store/image/sample.wav
 create mode 100644 exec/java-exec/src/test/resources/store/image/tiff.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/wav.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/webp.json
 create mode 100644 exec/java-exec/src/test/resources/store/image/withExifAndIptc.jpg

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 09/10: DRILL-5924: native-client: Support user-specified CXX_FLAGS

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 7f30b0464f60a617be727c6c8b31b5c634564cff
Author: Korn, Uwe <Uw...@blue-yonder.com>
AuthorDate: Sat Oct 28 21:45:51 2017 +0200

    DRILL-5924: native-client: Support user-specified CXX_FLAGS
    
    This closes #1022
---
 contrib/native/client/CMakeLists.txt                     | 9 +++++----
 contrib/native/client/src/clientlib/CMakeLists.txt       | 1 -
 contrib/native/client/src/clientlib/y2038/CMakeLists.txt | 8 --------
 contrib/native/client/src/protobuf/CMakeLists.txt        | 4 ----
 4 files changed, 5 insertions(+), 17 deletions(-)

diff --git a/contrib/native/client/CMakeLists.txt b/contrib/native/client/CMakeLists.txt
index 3f6c44b..f434c41 100644
--- a/contrib/native/client/CMakeLists.txt
+++ b/contrib/native/client/CMakeLists.txt
@@ -101,13 +101,14 @@ find_package(Boost 1.53.0 REQUIRED COMPONENTS regex system date_time chrono thre
 include_directories(${Boost_INCLUDE_DIRS})
 
 
-if(CMAKE_COMPILER_IS_GNUCXX)
-    set(CMAKE_EXE_LINKER_FLAGS "-lrt -lpthread")
-    set(CMAKE_CXX_FLAGS "-fPIC")
+if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_COMPILER_IS_GNUCC)
+    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -lrt -lpthread")
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC")
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC")
 endif()
 
 if(MSVC)
-    set(CMAKE_CXX_FLAGS "/EHsc")
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHsc")
 endif()
 
 if(MSVC)
diff --git a/contrib/native/client/src/clientlib/CMakeLists.txt b/contrib/native/client/src/clientlib/CMakeLists.txt
index 7b9ecc3..e793e66 100644
--- a/contrib/native/client/src/clientlib/CMakeLists.txt
+++ b/contrib/native/client/src/clientlib/CMakeLists.txt
@@ -50,7 +50,6 @@ set_property(
     )
 
 if(MSVC)
-    set(CMAKE_CXX_FLAGS "/EHsc")
     add_definitions(-DDRILL_CLIENT_EXPORTS -D_SCL_SECURE_NO_WARNINGS)
 endif()
 
diff --git a/contrib/native/client/src/clientlib/y2038/CMakeLists.txt b/contrib/native/client/src/clientlib/y2038/CMakeLists.txt
index 1cb4d72..bfc3fb4 100644
--- a/contrib/native/client/src/clientlib/y2038/CMakeLists.txt
+++ b/contrib/native/client/src/clientlib/y2038/CMakeLists.txt
@@ -18,14 +18,6 @@
 
 # Y2038 library
 
-if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_COMPILER_IS_GNUCC)
-    set(CMAKE_CXX_FLAGS "-fPIC")
-endif()
-
-if(CMAKE_COMPILER_IS_GNUCC)
-    set(CMAKE_C_FLAGS "-fPIC")
-endif()
-
 set (Y2038_SRC_FILES
     ${CMAKE_CURRENT_SOURCE_DIR}/time64.c
     )
diff --git a/contrib/native/client/src/protobuf/CMakeLists.txt b/contrib/native/client/src/protobuf/CMakeLists.txt
index 95001c5..b113234 100644
--- a/contrib/native/client/src/protobuf/CMakeLists.txt
+++ b/contrib/native/client/src/protobuf/CMakeLists.txt
@@ -103,10 +103,6 @@ add_custom_target(cpProtobufs
 #message("ProtoHeaders =  ${ProtoHeaders}" )
 #message("ProtoIncludes =  ${ProtoIncludes}" )
 
-if(MSVC)
-    set(CMAKE_CXX_FLAGS "/EHsc")
-endif()
-
 add_library(protomsgs STATIC ${ProtoSources} ${ProtoHeaders} ${ProtoIncludes} )
 #set linker properties. The first time around, the protobufs generated files may not exist
 # and CMAKE will not be able to determine the linker type.

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 07/10: DRILL-6456: Planner shouldn't create any exchanges on the right side of Lateral Join.

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit fde66d776cb16e77c1b3436c8a21364a43fb5f8f
Author: Hanumath Rao Maduri <ha...@gmail.com>
AuthorDate: Wed May 30 17:59:51 2018 -0700

    DRILL-6456: Planner shouldn't create any exchanges on the right side of Lateral Join.
    
    This closes #1299
---
 .../visitor/ExcessiveExchangeIdentifier.java       |  72 ++++++++-
 .../impl/lateraljoin/TestLateralPlans.java         | 164 ++++++++++++++++++++-
 2 files changed, 227 insertions(+), 9 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java
index 7bfe214..b4ed5e0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/ExcessiveExchangeIdentifier.java
@@ -19,8 +19,8 @@ package org.apache.drill.exec.planner.physical.visitor;
 
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.drill.exec.planner.fragment.DistributionAffinity;
+import org.apache.drill.exec.planner.physical.CorrelatePrel;
 import org.apache.drill.exec.planner.physical.ExchangePrel;
 import org.apache.drill.exec.planner.physical.Prel;
 import org.apache.drill.exec.planner.physical.ScanPrel;
@@ -28,9 +28,11 @@ import org.apache.drill.exec.planner.physical.ScreenPrel;
 import org.apache.calcite.rel.RelNode;
 
 import com.google.common.collect.Lists;
+import org.apache.drill.exec.planner.physical.UnnestPrel;
 
 public class ExcessiveExchangeIdentifier extends BasePrelVisitor<Prel, ExcessiveExchangeIdentifier.MajorFragmentStat, RuntimeException> {
   private final long targetSliceSize;
+  private CorrelatePrel topMostLateralJoin = null;
 
   public ExcessiveExchangeIdentifier(long targetSliceSize) {
     this.targetSliceSize = targetSliceSize;
@@ -45,18 +47,28 @@ public class ExcessiveExchangeIdentifier extends BasePrelVisitor<Prel, Excessive
   public Prel visitExchange(ExchangePrel prel, MajorFragmentStat parent) throws RuntimeException {
     parent.add(prel);
     MajorFragmentStat newFrag = new MajorFragmentStat();
+    newFrag.setRightSideOfLateral(parent.isRightSideOfLateral());
     Prel newChild = ((Prel) prel.getInput()).accept(this, newFrag);
-
-    if (newFrag.isSingular() && parent.isSingular() &&
-        // if one of them has strict distribution or none, we can remove the exchange
-        (!newFrag.isDistributionStrict() || !parent.isDistributionStrict())
-        ) {
+    if (canRemoveExchange(parent, newFrag)) {
       return newChild;
     } else {
       return (Prel) prel.copy(prel.getTraitSet(), Collections.singletonList((RelNode) newChild));
     }
   }
 
+  private boolean canRemoveExchange(MajorFragmentStat parentFrag, MajorFragmentStat childFrag) {
+    if (childFrag.isSingular() && parentFrag.isSingular() &&
+       (!childFrag.isDistributionStrict() || !parentFrag.isDistributionStrict())) {
+      return true;
+    }
+
+    if (parentFrag.isRightSideOfLateral()) {
+      return true;
+    }
+
+    return false;
+  }
+
   @Override
   public Prel visitScreen(ScreenPrel prel, MajorFragmentStat s) throws RuntimeException {
     s.addScreen(prel);
@@ -71,6 +83,40 @@ public class ExcessiveExchangeIdentifier extends BasePrelVisitor<Prel, Excessive
   }
 
   @Override
+  public Prel visitCorrelate(CorrelatePrel prel, MajorFragmentStat s) throws RuntimeException {
+    List<RelNode> children = Lists.newArrayList();
+    s.add(prel);
+
+    for (Prel p : prel) {
+      s.add(p);
+    }
+
+    // Traverse the left side of the Lateral join first. Left side of the
+    // Lateral shouldn't have any restrictions on Exchanges.
+    children.add(((Prel)prel.getInput(0)).accept(this, s));
+    // Save the outermost Lateral join so as to unset the flag later.
+    if (topMostLateralJoin == null) {
+      topMostLateralJoin = prel;
+    }
+
+    // Right side of the Lateral shouldn't have any Exchanges. Hence set the
+    // flag so that visitExchange removes the exchanges.
+    s.setRightSideOfLateral(true);
+    children.add(((Prel)prel.getInput(1)).accept(this, s));
+    if (topMostLateralJoin == prel) {
+      topMostLateralJoin = null;
+      s.setRightSideOfLateral(false);
+    }
+    return (Prel) prel.copy(prel.getTraitSet(), children);
+  }
+
+  @Override
+  public Prel visitUnnest(UnnestPrel prel, MajorFragmentStat s) throws RuntimeException {
+    s.addUnnest(prel);
+    return prel;
+  }
+
+  @Override
   public Prel visitPrel(Prel prel, MajorFragmentStat s) throws RuntimeException {
     List<RelNode> children = Lists.newArrayList();
     s.add(prel);
@@ -98,6 +144,7 @@ public class ExcessiveExchangeIdentifier extends BasePrelVisitor<Prel, Excessive
     private double maxRows = 0d;
     private int maxWidth = Integer.MAX_VALUE;
     private boolean isMultiSubScan = false;
+    private boolean rightSideOfLateral = false;
 
     public void add(Prel prel) {
       maxRows = Math.max(prel.estimateRowCount(prel.getCluster().getMetadataQuery()), maxRows);
@@ -130,9 +177,20 @@ public class ExcessiveExchangeIdentifier extends BasePrelVisitor<Prel, Excessive
       return w == 1;
     }
 
+    public boolean isRightSideOfLateral() {
+      return this.rightSideOfLateral;
+    }
+
+    public void addUnnest(UnnestPrel prel) {
+      add(prel);
+    }
+
+    public void setRightSideOfLateral(boolean rightSideOfLateral) {
+      this.rightSideOfLateral = rightSideOfLateral;
+    }
+
     public boolean isDistributionStrict() {
       return distributionAffinity == DistributionAffinity.HARD;
     }
   }
-
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/lateraljoin/TestLateralPlans.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/lateraljoin/TestLateralPlans.java
index 9e19729..00ab971 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/lateraljoin/TestLateralPlans.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/lateraljoin/TestLateralPlans.java
@@ -18,10 +18,13 @@
 package org.apache.drill.exec.physical.impl.lateraljoin;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.test.ClientFixture;
 import org.apache.drill.test.ClusterFixture;
@@ -30,10 +33,18 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.Ignore;
 
+import java.nio.file.Paths;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
 public class TestLateralPlans extends BaseTestQuery {
+  private static final String regularTestFile_1 = "cust_order_10_1.json";
+  private static final String regularTestFile_2 = "cust_order_10_2.json";
 
   @BeforeClass
   public static void enableUnnestLateral() throws Exception {
+    dirTestWatcher.copyResourceToRoot(Paths.get("lateraljoin", "multipleFiles", regularTestFile_1));
+    dirTestWatcher.copyResourceToRoot(Paths.get("lateraljoin", "multipleFiles", regularTestFile_2));
     test("alter session set `planner.enable_unnest_lateral`=true");
   }
 
@@ -255,7 +266,7 @@ public class TestLateralPlans extends BaseTestQuery {
           .sql(Sql)
           .run();
     } catch (UserRemoteException ex) {
-      assert(ex.getMessage().contains("Alias table and column name are required for UNNEST"));
+      assertTrue(ex.getMessage().contains("Alias table and column name are required for UNNEST"));
     }
   }
 
@@ -272,7 +283,156 @@ public class TestLateralPlans extends BaseTestQuery {
           .sql(Sql)
           .run();
     } catch (UserRemoteException ex) {
-      assert(ex.getMessage().contains("Alias table and column name are required for UNNEST"));
+      assertTrue(ex.getMessage().contains("Alias table and column name are required for UNNEST"));
+    }
+  }
+
+  /***********************************************************************************************
+   Following test cases are introduced to make sure no exchanges are present on right side of
+   Lateral join.
+   **********************************************************************************************/
+
+  @Test
+  public void testNoExchangeWithAggWithoutGrpBy() throws Exception {
+    String Sql = "select d1.totalprice from dfs.`lateraljoin/multipleFiles` t," +
+            " lateral ( select sum(t2.ord.o_totalprice) as totalprice from unnest(t.c_orders) t2(ord)) d1";
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.ENABLE_UNNEST_LATERAL_KEY, true)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String explain = client.queryBuilder().sql(Sql).explainText();
+      String rightChild = getRightChildOfLateral(explain);
+      assertFalse(rightChild.contains("Exchange"));
+    }
+  }
+
+  @Test
+  public void testNoExchangeWithStreamAggWithGrpBy() throws Exception {
+    String Sql = "select d1.totalprice from dfs.`lateraljoin/multipleFiles` t," +
+            " lateral ( select sum(t2.ord.o_totalprice) as totalprice from unnest(t.c_orders) t2(ord) group by t2.ord.o_orderkey) d1";
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.ENABLE_UNNEST_LATERAL_KEY, true)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1)
+            .setOptionDefault(PlannerSettings.HASHAGG.getOptionName(), false)
+            .setOptionDefault(PlannerSettings.STREAMAGG.getOptionName(), true);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String explain = client.queryBuilder().sql(Sql).explainText();
+      String rightChild = getRightChildOfLateral(explain);
+      assertFalse(rightChild.contains("Exchange"));
     }
   }
+
+  @Test
+  public void testNoExchangeWithHashAggWithGrpBy() throws Exception {
+    String Sql = "select d1.totalprice from dfs.`lateraljoin/multipleFiles` t," +
+            " lateral ( select sum(t2.ord.o_totalprice) as totalprice from unnest(t.c_orders) t2(ord) group by t2.ord.o_orderkey) d1";
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.ENABLE_UNNEST_LATERAL_KEY, true)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1)
+            .setOptionDefault(PlannerSettings.HASHAGG.getOptionName(), true)
+            .setOptionDefault(PlannerSettings.STREAMAGG.getOptionName(), false);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String explain = client.queryBuilder().sql(Sql).explainText();
+      String rightChild = getRightChildOfLateral(explain);
+      assertFalse(rightChild.contains("Exchange"));
+    }
+  }
+
+  @Test
+  public void testNoExchangeWithOrderByWithoutLimit() throws Exception {
+    String Sql = "select d1.totalprice from dfs.`lateraljoin/multipleFiles` t," +
+            " lateral ( select t2.ord.o_totalprice as totalprice from unnest(t.c_orders) t2(ord) order by t2.ord.o_orderkey) d1";
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.ENABLE_UNNEST_LATERAL_KEY, true)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String explain = client.queryBuilder().sql(Sql).explainText();
+      String rightChild = getRightChildOfLateral(explain);
+      assertFalse(rightChild.contains("Exchange"));
+    }
+  }
+
+  @Test
+  public void testNoExchangeWithOrderByLimit() throws Exception {
+    String Sql = "select d1.totalprice from dfs.`lateraljoin/multipleFiles` t," +
+            " lateral ( select t2.ord.o_totalprice as totalprice from unnest(t.c_orders) t2(ord) order by t2.ord.o_orderkey limit 10) d1";
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.ENABLE_UNNEST_LATERAL_KEY, true)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String explain = client.queryBuilder().sql(Sql).explainText();
+      String rightChild = getRightChildOfLateral(explain);
+      assertFalse(rightChild.contains("Exchange"));
+    }
+  }
+
+
+  @Test
+  public void testNoExchangeWithLateralsDownStreamJoin() throws Exception {
+    String Sql = "select d1.totalprice from dfs.`lateraljoin/multipleFiles` t, dfs.`lateraljoin/multipleFiles` t2, " +
+            " lateral ( select t2.ord.o_totalprice as totalprice from unnest(t.c_orders) t2(ord) order by t2.ord.o_orderkey limit 10) d1" +
+            " where t.c_name = t2.c_name";
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.ENABLE_UNNEST_LATERAL_KEY, true)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String explain = client.queryBuilder().sql(Sql).explainText();
+      String rightChild = getRightChildOfLateral(explain);
+      assertFalse(rightChild.contains("Exchange"));
+    }
+  }
+
+  @Test
+  public void testNoExchangeWithLateralsDownStreamUnion() throws Exception {
+    String Sql = "select t.c_name from dfs.`lateraljoin/multipleFiles` t union all " +
+            " select t.c_name from dfs.`lateraljoin/multipleFiles` t, " +
+                    " lateral ( select t2.ord.o_totalprice as totalprice from unnest(t.c_orders) t2(ord) order by t2.ord.o_orderkey limit 10) d1";
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.ENABLE_UNNEST_LATERAL_KEY, true)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String explain = client.queryBuilder().sql(Sql).explainText();
+      String rightChild = getRightChildOfLateral(explain);
+      assertFalse(rightChild.contains("Exchange"));
+    }
+  }
+
+  @Test
+  public void testNoExchangeWithLateralsDownStreamAgg() throws Exception {
+    String Sql = "select sum(d1.totalprice) from dfs.`lateraljoin/multipleFiles` t, " +
+            " lateral ( select t2.ord.o_totalprice as totalprice from unnest(t.c_orders) t2(ord) order by t2.ord.o_orderkey limit 10) d1 group by t.c_custkey";
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+            .setOptionDefault(ExecConstants.ENABLE_UNNEST_LATERAL_KEY, true)
+            .setOptionDefault(ExecConstants.SLICE_TARGET, 1)
+            .setOptionDefault(PlannerSettings.HASHAGG.getOptionName(), false)
+            .setOptionDefault(PlannerSettings.STREAMAGG.getOptionName(), true);
+
+    try (ClusterFixture cluster = builder.build();
+         ClientFixture client = cluster.clientFixture()) {
+      String explain = client.queryBuilder().sql(Sql).explainText();
+      String rightChild = getRightChildOfLateral(explain);
+      assertFalse(rightChild.contains("Exchange"));
+    }
+  }
+
+  private String getRightChildOfLateral(String explain) throws Exception {
+    Matcher matcher = Pattern.compile("Correlate.*Unnest", Pattern.MULTILINE | Pattern.DOTALL).matcher(explain);
+    assertTrue (matcher.find());
+    String CorrelateUnnest = matcher.group(0);
+    return CorrelateUnnest.substring(CorrelateUnnest.lastIndexOf("Scan"));
+  }
 }

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 08/10: DRILL-4020: The not-equal operator returns incorrect results when used on the HBase row key

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 0d5eda83fe34928ff60629e6a4903d43a1d82582
Author: Akihiko Kusanagi <ak...@mapr.com>
AuthorDate: Wed Nov 4 21:33:06 2015 +0900

    DRILL-4020: The not-equal operator returns incorrect results when used on the HBase row key
    
    - Added a condition that checks if the filter to the scan specification doesn't have NOT_EQUAL operator
    - Added testFilterPushDownRowKeyNotEqual() to TestHBaseFilterPushDown
    
    This closes #309
---
 .../drill/exec/store/hbase/HBaseFilterBuilder.java     |  1 +
 .../apache/drill/hbase/TestHBaseFilterPushDown.java    | 18 ++++++++++++++++++
 2 files changed, 19 insertions(+)

diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java
index 8d2e8ff..6e1efe5 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseFilterBuilder.java
@@ -61,6 +61,7 @@ public class HBaseFilterBuilder extends AbstractExprVisitor<HBaseScanSpec, Void,
        * remove it since its effect is also achieved through startRow and stopRow.
        */
       if (parsedSpec.filter instanceof RowFilter &&
+          ((RowFilter)parsedSpec.filter).getOperator() != CompareOp.NOT_EQUAL &&
           ((RowFilter)parsedSpec.filter).getComparator() instanceof BinaryComparator) {
         parsedSpec.filter = null;
       }
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
index 0e14cb1..e70cf08 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/TestHBaseFilterPushDown.java
@@ -45,6 +45,24 @@ public class TestHBaseFilterPushDown extends BaseHBaseTest {
   }
 
   @Test
+  public void testFilterPushDownRowKeyNotEqual() throws Exception {
+    setColumnWidths(new int[] {8, 38, 38});
+    final String sql = "SELECT\n"
+        + "  *\n"
+        + "FROM\n"
+        + "  hbase.`[TABLE_NAME]` tableName\n"
+        + "WHERE\n"
+        + "  row_key <> 'b4'";
+
+    runHBaseSQLVerifyCount(sql, 6);
+
+    final String[] expectedPlan = {".*startRow=, stopRow=, filter=RowFilter \\(NOT_EQUAL, b4\\).*"};
+    final String[] excludedPlan ={};
+    final String sqlHBase = canonizeHBaseSQL(sql);
+    PlanTestBase.testPlanMatchingPatterns(sqlHBase, expectedPlan, excludedPlan);
+  }
+
+  @Test
   public void testFilterPushDownRowKeyEqualWithItem() throws Exception {
     setColumnWidths(new int[] {20, 30});
     final String sql = "SELECT\n"

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 01/10: DRILL-4091: Adding support for additional gis operations in gis contrib module

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit eded745d4647a15716fa570a69b660385d0f0ee7
Author: potocki <k2...@gmx.com>
AuthorDate: Mon Nov 16 14:05:18 2015 +0100

    DRILL-4091: Adding support for additional gis operations in gis contrib module
    
    This closes #1201
---
 contrib/gis/pom.xml                                |   6 +-
 contrib/gis/sample-data/CA-cities-with-nulls.csv   |   3 +
 contrib/gis/sample-data/polygons.tsv               |   5 +
 .../com/esri/core/geometry/VertexGeomAccessor.java |  25 +++
 .../gis/{STGeomFromTextSrid.java => STBuffer.java} |  33 +--
 .../gis/{STGeomFromText.java => STContains.java}   |  36 ++--
 .../gis/{STGeomFromText.java => STCrosses.java}    |  35 ++--
 .../{STGeomFromTextSrid.java => STDifference.java} |  36 ++--
 .../gis/{STGeomFromText.java => STDisjoint.java}   |  35 ++--
 .../gis/{STGeomFromText.java => STDistance.java}   |  36 ++--
 .../{STGeomFromTextSrid.java => STEnvelope.java}   |  39 ++--
 .../gis/{STGeomFromText.java => STEquals.java}     |  35 ++--
 .../exec/expr/fn/impl/gis/STGeomFromText.java      |   4 +-
 .../exec/expr/fn/impl/gis/STGeomFromTextSrid.java  |   2 +-
 .../gis/{STGeomFromText.java => STIntersects.java} |  36 ++--
 .../gis/{STGeomFromText.java => STOverlaps.java}   |  35 ++--
 .../drill/exec/expr/fn/impl/gis/STRelate.java      |  73 +++++++
 .../gis/{STGeomFromText.java => STTouches.java}    |  35 ++--
 .../drill/exec/expr/fn/impl/gis/STTransform.java   | 114 +++++++++++
 .../gis/{STGeomFromTextSrid.java => STUnion.java}  |  35 ++--
 .../exec/expr/fn/impl/gis/STUnionAggregate.java    | 115 +++++++++++
 .../impl/gis/{STGeomFromText.java => STXFunc.java} |  30 +--
 .../impl/gis/{STGeomFromText.java => STXMax.java}  |  39 ++--
 .../impl/gis/{STGeomFromText.java => STXMin.java}  |  39 ++--
 .../impl/gis/{STGeomFromText.java => STYFunc.java} |  30 +--
 .../impl/gis/{STGeomFromText.java => STYMax.java}  |  39 ++--
 .../impl/gis/{STGeomFromText.java => STYMin.java}  |  39 ++--
 .../expr/fn/impl/gis/TestGeometryFunctions.java    | 228 ++++++++++++++++++++-
 28 files changed, 903 insertions(+), 314 deletions(-)

diff --git a/contrib/gis/pom.xml b/contrib/gis/pom.xml
index 30a0924..96f694a 100644
--- a/contrib/gis/pom.xml
+++ b/contrib/gis/pom.xml
@@ -45,7 +45,11 @@
 			<artifactId>esri-geometry-api</artifactId>
 			<version>2.0.0</version>
 		</dependency>
-
+		<dependency>
+		    <groupId>org.osgeo</groupId>
+		    <artifactId>proj4j</artifactId>
+		    <version>0.1.0</version>
+		</dependency>
 		<!-- Test dependencies -->
 		<dependency>
 			<groupId>org.apache.drill.exec</groupId>
diff --git a/contrib/gis/sample-data/CA-cities-with-nulls.csv b/contrib/gis/sample-data/CA-cities-with-nulls.csv
new file mode 100644
index 0000000..dcd67ec
--- /dev/null
+++ b/contrib/gis/sample-data/CA-cities-with-nulls.csv
@@ -0,0 +1,3 @@
+US,6,"Acampo",
+US,6,"Agnew",POINT(-121.9591252 37.3946626)
+US,6,"Agua Caliente",POINT(-122.4880366 38.3240804)
\ No newline at end of file
diff --git a/contrib/gis/sample-data/polygons.tsv b/contrib/gis/sample-data/polygons.tsv
new file mode 100644
index 0000000..f70c922
--- /dev/null
+++ b/contrib/gis/sample-data/polygons.tsv
@@ -0,0 +1,5 @@
+1	POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))
+1	POLYGON((0 0, 0 -1, 1 -1, 1 0, 0 0))
+2	POLYGON((10 10, 10 11, 11 11, 11 10, 10 10))
+2	POLYGON((10 10, 10 9, 11 9, 11 10, 10 10))
+2 
diff --git a/contrib/gis/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java b/contrib/gis/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java
new file mode 100644
index 0000000..c95b798
--- /dev/null
+++ b/contrib/gis/src/main/java/com/esri/core/geometry/VertexGeomAccessor.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.esri.core.geometry;
+
+public class VertexGeomAccessor {
+  public static MultiVertexGeometry getVertexGeometry(Geometry geom){
+    return (MultiVertexGeometry) geom._getImpl();
+  }
+}
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STBuffer.java
similarity index 63%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STBuffer.java
index 055eb94..a1d3af4 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STBuffer.java
@@ -23,20 +23,23 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableIntHolder;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+* Returns a geometry that represents all points whose distance from this Geometry
+* is less than or equal to radius
+*/
+@FunctionTemplate(name = "st_buffer", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromTextSrid implements DrillSimpleFunc {
+public class STBuffer implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
 
   @Param
-  NullableIntHolder sridParam;
+  Float8Holder bufferRadiusParam;
 
   @Output
   VarBinaryHolder out;
@@ -48,21 +51,19 @@ public class STGeomFromTextSrid implements DrillSimpleFunc {
   }
 
   public void eval() {
-    int srid = sridParam.value;
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
+    double bufferRadius = bufferRadiusParam.value;
 
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-    geom.setSpatialReference(com.esri.core.geometry.SpatialReference.create(srid));
+    com.esri.core.geometry.ogc.OGCGeometry bufferedGeom = geom1.buffer(bufferRadius);
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    java.nio.ByteBuffer bufferedGeomBytes = bufferedGeom.asBinary();
 
-    int outputSize = pointBytes.remaining();
+    int outputSize = bufferedGeomBytes.remaining();
     buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
     out.start = 0;
     out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
-    }
+    buffer.setBytes(0, bufferedGeomBytes);
+  }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STContains.java
similarity index 59%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STContains.java
index 3a613e1..5204ce6 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STContains.java
@@ -23,19 +23,26 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ Returns true if and only if no points of B lie in the exterior of A,
+ and at least one point of the interior of B lies in the interior of A.
+*/
+@FunctionTemplate(name = "st_contains", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STContains implements DrillSimpleFunc {
+  @Param
+  VarBinaryHolder geom1Param;
+
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom2Param;
 
   @Output
-  VarBinaryHolder out;
+  BitHolder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +51,16 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    int contains = geom1.contains(geom2) ? 1 : 0;
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    out.value = contains;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STCrosses.java
similarity index 61%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STCrosses.java
index 3a613e1..95d7aca 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STCrosses.java
@@ -23,19 +23,25 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns TRUE if the supplied geometries have some, but not all, interior points in common
+ */
+@FunctionTemplate(name = "st_crosses", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STCrosses implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
+
+  @Param
+  VarBinaryHolder geom2Param;
 
   @Output
-  VarBinaryHolder out;
+  BitHolder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +50,16 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    int crosses = geom1.crosses(geom2) ? 1 : 0;
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    out.value = crosses;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDifference.java
similarity index 59%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDifference.java
index 055eb94..4906880 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDifference.java
@@ -23,20 +23,22 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableIntHolder;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Given geometries A and B, this function returns a geometry that represents
+ * the part of geometry A that does not intersect with geometry B
+ */
+@FunctionTemplate(name = "st_difference", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromTextSrid implements DrillSimpleFunc {
+public class STDifference implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
 
   @Param
-  NullableIntHolder sridParam;
+  VarBinaryHolder geom2Param;
 
   @Output
   VarBinaryHolder out;
@@ -48,21 +50,21 @@ public class STGeomFromTextSrid implements DrillSimpleFunc {
   }
 
   public void eval() {
-    int srid = sridParam.value;
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry diffGeom = geom1.difference(geom2);
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-    geom.setSpatialReference(com.esri.core.geometry.SpatialReference.create(srid));
+    java.nio.ByteBuffer bufferedGeomBytes = diffGeom.asBinary();
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
-
-    int outputSize = pointBytes.remaining();
+    int outputSize = bufferedGeomBytes.remaining();
     buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
     out.start = 0;
     out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
-    }
+    buffer.setBytes(0, bufferedGeomBytes);
+  }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDisjoint.java
similarity index 61%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDisjoint.java
index 3a613e1..8a34241 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDisjoint.java
@@ -23,19 +23,25 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns TRUE if two Geometries do not "spatially intersect" - if they do not share any space
+ */
+@FunctionTemplate(name = "st_disjoint", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STDisjoint implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
+
+  @Param
+  VarBinaryHolder geom2Param;
 
   @Output
-  VarBinaryHolder out;
+  BitHolder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +50,16 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    int isDisjoint = geom1.disjoint(geom2) ? 1 : 0;
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    out.value = isDisjoint;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDistance.java
similarity index 58%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDistance.java
index 3a613e1..9415f39 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STDistance.java
@@ -23,19 +23,26 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * For geometry type Returns the 2D Cartesian distance between two geometries in projected units (based on spatial ref).
+ * For geography type defaults to return minimum geodesic distance between two geographies in meters
+ */
+@FunctionTemplate(name = "st_distance", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STDistance implements DrillSimpleFunc {
+  @Param
+  VarBinaryHolder geom1Param;
+
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom2Param;
 
   @Output
-  VarBinaryHolder out;
+  Float8Holder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +51,14 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
-
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    out.value = geom1.distance(geom2);
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEnvelope.java
similarity index 60%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEnvelope.java
index 055eb94..8286224 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEnvelope.java
@@ -23,20 +23,19 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableIntHolder;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns a geometry representing the double precision (float8) bounding box of the supplied geometry.
+ * The polygon is defined by the corner points of the bounding box ((MINX, MINY), (MINX, MAXY), (MAXX, MAXY), (MAXX, MINY), (MINX, MINY))
+ */
+@FunctionTemplate(name = "st_envelope", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromTextSrid implements DrillSimpleFunc {
-  @Param
-  NullableVarCharHolder input;
-
+public class STEnvelope implements DrillSimpleFunc {
   @Param
-  NullableIntHolder sridParam;
+  VarBinaryHolder geom1Param;
 
   @Output
   VarBinaryHolder out;
@@ -48,21 +47,23 @@ public class STGeomFromTextSrid implements DrillSimpleFunc {
   }
 
   public void eval() {
-    int srid = sridParam.value;
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
 
-    com.esri.core.geometry.ogc.OGCGeometry geom;
-
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-    geom.setSpatialReference(com.esri.core.geometry.SpatialReference.create(srid));
+    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
+    if (geom1.geometryType().equals("Point")) {
+      envelopeGeom = geom1;
+    } else {
+      envelopeGeom = geom1.envelope();
+    }
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    java.nio.ByteBuffer envelopeGeomBytes = envelopeGeom.asBinary();
 
-    int outputSize = pointBytes.remaining();
+    int outputSize = envelopeGeomBytes.remaining();
     buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
     out.start = 0;
     out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
-    }
+    buffer.setBytes(0, envelopeGeomBytes);
+  }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEquals.java
similarity index 61%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEquals.java
index 3a613e1..b5e22d6 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STEquals.java
@@ -23,19 +23,25 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns true if the given geometries represent the same geometry. Directionality is ignored
+ */
+@FunctionTemplate(name = "st_equals", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STEquals implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
+
+  @Param
+  VarBinaryHolder geom2Param;
 
   @Output
-  VarBinaryHolder out;
+  BitHolder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +50,16 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    int equals = geom1.equals(geom2) ? 1 : 0;
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    out.value = equals;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
index 3a613e1..042046e 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
@@ -45,12 +45,10 @@ public class STGeomFromText implements DrillSimpleFunc {
 
   public void eval() {
     String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
+            input.buffer);
 
     com.esri.core.geometry.ogc.OGCGeometry geom;
-
     geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-
     java.nio.ByteBuffer pointBytes = geom.asBinary();
 
     int outputSize = pointBytes.remaining();
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
index 055eb94..9a7432e 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
@@ -64,5 +64,5 @@ public class STGeomFromTextSrid implements DrillSimpleFunc {
     out.start = 0;
     out.end = outputSize;
     buffer.setBytes(0, pointBytes);
-    }
+  }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STIntersects.java
similarity index 59%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STIntersects.java
index 3a613e1..9e152c5 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STIntersects.java
@@ -23,19 +23,25 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns TRUE if the Geometries/Geography "spatially intersect in 2D" - (share any portion of space) and FALSE if they don't (they are Disjoint)
+ */
+@FunctionTemplate(name = "st_intersects", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STIntersects implements DrillSimpleFunc {
+  @Param
+  VarBinaryHolder geom1Param;
+
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom2Param;
 
   @Output
-  VarBinaryHolder out;
+  BitHolder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +50,15 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
-
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    int intersects = geom1.intersects(geom2) ? 1 : 0;
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    out.value = intersects;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STOverlaps.java
similarity index 60%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STOverlaps.java
index 3a613e1..7ae770d 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STOverlaps.java
@@ -23,19 +23,25 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns TRUE if the Geometries share space, are of the same dimension, but are not completely contained by each other
+ */
+@FunctionTemplate(name = "st_overlaps", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STOverlaps implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
+
+  @Param
+  VarBinaryHolder geom2Param;
 
   @Output
-  VarBinaryHolder out;
+  BitHolder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +50,16 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    int overlaps = geom1.overlaps(geom2) ? 1 : 0;
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    out.value = overlaps;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STRelate.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STRelate.java
new file mode 100644
index 0000000..9a1c64b
--- /dev/null
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STRelate.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.fn.impl.gis;
+
+import javax.inject.Inject;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.BitHolder;
+import org.apache.drill.exec.expr.holders.VarBinaryHolder;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+
+import io.netty.buffer.DrillBuf;
+
+/*
+ *  Returns true if this Geometry is spatially related to anotherGeometry, by testing for intersections between
+ *  the Interior, Boundary and Exterior of the two geometries as specified by the values in the intersectionMatrixPattern.
+ *  Note: unlike PostGIS, the intersectionMatrixPattern argument is required here; there is no overload without it
+ */
+@FunctionTemplate(name = "st_relate", scope = FunctionTemplate.FunctionScope.SIMPLE,
+  nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
+public class STRelate implements DrillSimpleFunc {
+  @Param
+  VarBinaryHolder geom1Param;
+
+  @Param
+  VarBinaryHolder geom2Param;
+
+  @Param
+  VarCharHolder matrixParam;
+
+  @Output
+  BitHolder out;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+  }
+
+  public void eval() {
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
+    String matrix = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(matrixParam.start,
+        matrixParam.end, matrixParam.buffer);
+
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
+
+    int relates = geom1.relate(geom2, matrix) ? 1 : 0;
+
+    out.value = relates;
+  }
+}
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTouches.java
similarity index 61%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTouches.java
index 3a613e1..79ad0c7 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTouches.java
@@ -23,19 +23,25 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns TRUE if the geometries have at least one point in common, but their interiors do not intersect
+ */
+@FunctionTemplate(name = "st_touches", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STTouches implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
+
+  @Param
+  VarBinaryHolder geom2Param;
 
   @Output
-  VarBinaryHolder out;
+  BitHolder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +50,16 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    int touches = geom1.touches(geom2) ? 1 : 0;
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    out.value = touches;
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTransform.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTransform.java
new file mode 100644
index 0000000..c2788c1
--- /dev/null
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STTransform.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.fn.impl.gis;
+
+import javax.inject.Inject;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import org.apache.drill.exec.expr.holders.NullableIntHolder;
+import org.apache.drill.exec.expr.holders.VarBinaryHolder;
+import org.osgeo.proj4j.CRSFactory;
+import org.osgeo.proj4j.CoordinateTransform;
+
+import io.netty.buffer.DrillBuf;
+
+/*
+ * Return a new geometry with its coordinates transformed to a different spatial reference
+ */
+@FunctionTemplate(name = "st_transform", scope = FunctionTemplate.FunctionScope.SIMPLE,
+  nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
+public class STTransform implements DrillSimpleFunc {
+  @Param
+  VarBinaryHolder geom1Param;
+
+  @Param
+  NullableIntHolder sridSrcParam;
+
+  @Param
+  NullableIntHolder sridTgtParam;
+
+  @Workspace
+  CoordinateTransform transform;
+
+  @Workspace
+  CRSFactory crsFactory;
+
+  @Workspace
+  int sridTgt;
+
+  @Output
+  VarBinaryHolder out;
+
+  @Inject
+  DrillBuf buffer;
+
+  public void setup() {
+    int sridSrc = sridSrcParam.value;
+    sridTgt = sridTgtParam.value;
+
+    org.osgeo.proj4j.CoordinateReferenceSystem srcCrs =
+        new org.osgeo.proj4j.CRSFactory().createFromName("EPSG:" + sridSrc);
+
+    org.osgeo.proj4j.CoordinateReferenceSystem tgtCrs =
+        new org.osgeo.proj4j.CRSFactory().createFromName("EPSG:" + sridTgt);
+
+    transform = new org.osgeo.proj4j.BasicCoordinateTransform(srcCrs, tgtCrs);
+  }
+
+  public void eval() {
+    com.esri.core.geometry.ogc.OGCGeometry geomSrc = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+
+    //int code = Integer.parseInt(transform.getTargetCRS().getName().substring(5, 9));//now sridTgt
+    org.osgeo.proj4j.ProjCoordinate result = new org.osgeo.proj4j.ProjCoordinate();
+    com.esri.core.geometry.SpatialReference sr = com.esri.core.geometry.SpatialReference.create(sridTgt);
+    java.nio.ByteBuffer geomBytes = null;
+
+    if (geomSrc != null && geomSrc.geometryType().equals("Point")) {
+      com.esri.core.geometry.ogc.OGCPoint pointGeom = (com.esri.core.geometry.ogc.OGCPoint) geomSrc;
+      result = transform.transform(new org.osgeo.proj4j.ProjCoordinate(pointGeom.X(), pointGeom.Y()), result);
+
+      geomBytes = new com.esri.core.geometry.ogc.OGCPoint(
+          new com.esri.core.geometry.Point(result.x, result.y), sr).asBinary();
+    } else {
+      com.esri.core.geometry.Geometry esriGeom = geomSrc.getEsriGeometry();
+      com.esri.core.geometry.MultiVertexGeometry vertexGeom =
+          com.esri.core.geometry.VertexGeomAccessor.getVertexGeometry(esriGeom);
+      for (int i = 0; i < vertexGeom.getPointCount(); i++) {
+        com.esri.core.geometry.Point point = vertexGeom.getPoint(i);
+        result = transform.transform(new org.osgeo.proj4j.ProjCoordinate(point.getX(), point.getY()), result);
+        point.setXY(result.x, result.y);
+        vertexGeom.setPoint(i, point);
+      }
+
+      com.esri.core.geometry.ogc.OGCGeometry tGeom =
+          com.esri.core.geometry.ogc.OGCGeometry.createFromEsriGeometry(esriGeom, sr);
+      geomBytes = tGeom.asBinary();
+    }
+
+    int outputSize = geomBytes.remaining();
+    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
+    out.start = 0;
+    out.end = outputSize;
+    buffer.setBytes(0, geomBytes);
+  }
+}
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnion.java
similarity index 61%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnion.java
index 055eb94..907c6da 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromTextSrid.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnion.java
@@ -23,20 +23,21 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableIntHolder;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns a geometry that represents the point set union of the Geometries
+ */
+@FunctionTemplate(name = "st_union", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromTextSrid implements DrillSimpleFunc {
+public class STUnion implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
 
   @Param
-  NullableIntHolder sridParam;
+  VarBinaryHolder geom2Param;
 
   @Output
   VarBinaryHolder out;
@@ -48,21 +49,21 @@ public class STGeomFromTextSrid implements DrillSimpleFunc {
   }
 
   public void eval() {
-    int srid = sridParam.value;
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    com.esri.core.geometry.ogc.OGCGeometry geom2;
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+    geom2 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom2Param.buffer.nioBuffer(geom2Param.start, geom2Param.end - geom2Param.start));
 
-    com.esri.core.geometry.ogc.OGCGeometry geom;
+    com.esri.core.geometry.ogc.OGCGeometry unionGeom = geom1.union(geom2);
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-    geom.setSpatialReference(com.esri.core.geometry.SpatialReference.create(srid));
+    java.nio.ByteBuffer bufferedGeomBytes = unionGeom.asBinary();
 
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
-
-    int outputSize = pointBytes.remaining();
+    int outputSize = bufferedGeomBytes.remaining();
     buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
     out.start = 0;
     out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
-    }
+    buffer.setBytes(0, bufferedGeomBytes);
+  }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnionAggregate.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnionAggregate.java
new file mode 100644
index 0000000..bcb7615
--- /dev/null
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STUnionAggregate.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.fn.impl.gis;
+
+import javax.inject.Inject;
+
+import org.apache.drill.exec.expr.DrillAggFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.annotations.Workspace;
+import org.apache.drill.exec.expr.holders.BigIntHolder;
+import org.apache.drill.exec.expr.holders.IntHolder;
+import org.apache.drill.exec.expr.holders.NullableVarBinaryHolder;
+import org.apache.drill.exec.expr.holders.ObjectHolder;
+import org.apache.drill.exec.expr.holders.UInt1Holder;
+
+import io.netty.buffer.DrillBuf;
+
+/*
+ * Returns a geometry that represents the point set union of the Geometries
+ */
+@FunctionTemplate(name = "st_unionaggregate", scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
+public class STUnionAggregate implements DrillAggFunc {
+  @Param NullableVarBinaryHolder in;
+  @Workspace ObjectHolder value;
+  @Workspace UInt1Holder init;
+  @Workspace BigIntHolder nonNullCount;
+  @Workspace IntHolder srid;
+  @Inject DrillBuf buf;
+  @Output NullableVarBinaryHolder out;
+
+  public void setup() {
+    init = new UInt1Holder();
+    nonNullCount = new BigIntHolder();
+    nonNullCount.value = 0;
+    init.value = 0;
+    value = new ObjectHolder();
+    value.obj = new java.util.ArrayList<com.esri.core.geometry.Geometry>();
+  }
+
+  @Override
+  public void add() {
+    sout: {
+      if (in.isSet == 0) {
+        // processing nullable input and the value is null, so don't do anything...
+        break sout;
+      }
+      nonNullCount.value = 1;
+      java.util.ArrayList<com.esri.core.geometry.Geometry> tmp = (java.util.ArrayList<com.esri.core.geometry.Geometry>) value.obj;
+
+      com.esri.core.geometry.ogc.OGCGeometry geom;
+      geom = com.esri.core.geometry.ogc.OGCGeometry
+          .fromBinary(in.buffer.nioBuffer(in.start, in.end - in.start));
+
+      tmp.add(geom.getEsriGeometry());
+
+      if(init.value == 0) {
+        init.value = 1;
+        srid.value = geom.SRID();
+      }
+    } // end of sout block
+  }
+
+  @Override
+  public void output() {
+    if (nonNullCount.value > 0) {
+      out.isSet = 1;
+
+      java.util.ArrayList<com.esri.core.geometry.Geometry> tmp = (java.util.ArrayList<com.esri.core.geometry.Geometry>) value.obj;
+
+      com.esri.core.geometry.SpatialReference spatialRef = null;
+      if (srid.value != 0){
+        spatialRef = com.esri.core.geometry.SpatialReference.create(4326);
+      }
+      com.esri.core.geometry.Geometry[] geomArr =
+          (com.esri.core.geometry.Geometry[]) tmp.toArray( new com.esri.core.geometry.Geometry[0] );
+      com.esri.core.geometry.Geometry geom = com.esri.core.geometry.GeometryEngine.union(geomArr, spatialRef);
+
+      com.esri.core.geometry.ogc.OGCGeometry unionGeom = com.esri.core.geometry.ogc.OGCGeometry.createFromEsriGeometry(geom, spatialRef);
+      java.nio.ByteBuffer unionGeomBytes = unionGeom.asBinary();
+
+      int outputSize = unionGeomBytes.remaining();
+      buf = out.buffer = buf.reallocIfNeeded(outputSize);
+      out.start = 0;
+      out.end = outputSize;
+      buf.setBytes(0, unionGeomBytes);
+    } else {
+      out.isSet = 0;
+    }
+  }
+
+  @Override
+  public void reset() {
+    value = new ObjectHolder();
+    value.obj = new java.util.ArrayList<com.esri.core.geometry.Geometry>();
+    init.value = 0;
+    nonNullCount.value = 0;
+  }
+}
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXFunc.java
similarity index 66%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXFunc.java
index 3a613e1..8986bbc 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXFunc.java
@@ -23,19 +23,22 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Return the X coordinate of the point, or NaN if not available
+ */
+@FunctionTemplate(name = "st_x", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STXFunc implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geomParam;
 
   @Output
-  VarBinaryHolder out;
+  Float8Holder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +47,16 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
 
     com.esri.core.geometry.ogc.OGCGeometry geom;
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    geom = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geomParam.buffer.nioBuffer(geomParam.start, geomParam.end - geomParam.start));
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    if (geom != null && geom.geometryType().equals("Point")) {
+      out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom).X();
+    } else {
+      out.value = Double.NaN;
+    }
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMax.java
similarity index 60%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMax.java
index 3a613e1..ee8afa0 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMax.java
@@ -23,19 +23,22 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns the X maximum of the 2D/3D bounding box of a geometry
+ */
+@FunctionTemplate(name = "st_xmax", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STXMax implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
 
   @Output
-  VarBinaryHolder out;
+  Float8Holder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +47,17 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
-
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
-
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+
+    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
+    if (geom1.geometryType().equals("Point")) {
+      out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom1).X();
+    } else {
+      com.esri.core.geometry.Envelope envelope = new com.esri.core.geometry.Envelope();
+      geom1.getEsriGeometry().queryEnvelope(envelope);
+      out.value = envelope.getXMax();
+    }
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMin.java
similarity index 60%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMin.java
index 3a613e1..d527e6e 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STXMin.java
@@ -23,19 +23,22 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns the X minimum of the 2D/3D bounding box of a geometry
+ */
+@FunctionTemplate(name = "st_xmin", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STXMin implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
 
   @Output
-  VarBinaryHolder out;
+  Float8Holder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +47,17 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
-
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
-
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+
+    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
+    if (geom1.geometryType().equals("Point")) {
+      out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom1).X();
+    } else {
+      com.esri.core.geometry.Envelope envelope = new com.esri.core.geometry.Envelope();
+      geom1.getEsriGeometry().queryEnvelope(envelope);
+      out.value = envelope.getXMin();
+    }
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYFunc.java
similarity index 66%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYFunc.java
index 3a613e1..ce1771a 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYFunc.java
@@ -23,19 +23,22 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns the Y coordinate of the given point, or NaN if the geometry is not a point
+ */
+@FunctionTemplate(name = "st_y", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STYFunc implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geomParam;
 
   @Output
-  VarBinaryHolder out;
+  Float8Holder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +47,16 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
 
     com.esri.core.geometry.ogc.OGCGeometry geom;
 
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
+    geom = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geomParam.buffer.nioBuffer(geomParam.start, geomParam.end - geomParam.start));
 
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    if (geom != null && geom.geometryType().equals("Point")) {
+      out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom).Y();
+    } else {
+      out.value = Double.NaN;
+    }
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMax.java
similarity index 60%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMax.java
index 3a613e1..4101e79 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMax.java
@@ -23,19 +23,22 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns the Y maximum of the 2D or 3D bounding box of the given geometry.
+ */
+@FunctionTemplate(name = "st_ymax", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STYMax implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
 
   @Output
-  VarBinaryHolder out;
+  Float8Holder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +47,17 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
-
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
-
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+
+    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
+    if (geom1.geometryType().equals("Point")) {
+      out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom1).Y();
+    } else {
+      com.esri.core.geometry.Envelope envelope = new com.esri.core.geometry.Envelope();
+      geom1.getEsriGeometry().queryEnvelope(envelope);
+      out.value = envelope.getYMax();
+    }
   }
 }
diff --git a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMin.java
similarity index 60%
copy from contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
copy to contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMin.java
index 3a613e1..85fc7e4 100644
--- a/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STGeomFromText.java
+++ b/contrib/gis/src/main/java/org/apache/drill/exec/expr/fn/impl/gis/STYMin.java
@@ -23,19 +23,22 @@ import org.apache.drill.exec.expr.DrillSimpleFunc;
 import org.apache.drill.exec.expr.annotations.FunctionTemplate;
 import org.apache.drill.exec.expr.annotations.Output;
 import org.apache.drill.exec.expr.annotations.Param;
-import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
 import org.apache.drill.exec.expr.holders.VarBinaryHolder;
 
 import io.netty.buffer.DrillBuf;
 
-@FunctionTemplate(name = "st_geomfromtext", scope = FunctionTemplate.FunctionScope.SIMPLE,
+/*
+ * Returns the Y minimum of the 2D or 3D bounding box of the given geometry.
+ */
+@FunctionTemplate(name = "st_ymin", scope = FunctionTemplate.FunctionScope.SIMPLE,
   nulls = FunctionTemplate.NullHandling.NULL_IF_NULL)
-public class STGeomFromText implements DrillSimpleFunc {
+public class STYMin implements DrillSimpleFunc {
   @Param
-  NullableVarCharHolder input;
+  VarBinaryHolder geom1Param;
 
   @Output
-  VarBinaryHolder out;
+  Float8Holder out;
 
   @Inject
   DrillBuf buffer;
@@ -44,19 +47,17 @@ public class STGeomFromText implements DrillSimpleFunc {
   }
 
   public void eval() {
-    String wktText = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(input.start, input.end,
-        input.buffer);
-
-    com.esri.core.geometry.ogc.OGCGeometry geom;
-
-    geom = com.esri.core.geometry.ogc.OGCGeometry.fromText(wktText);
-
-    java.nio.ByteBuffer pointBytes = geom.asBinary();
-
-    int outputSize = pointBytes.remaining();
-    buffer = out.buffer = buffer.reallocIfNeeded(outputSize);
-    out.start = 0;
-    out.end = outputSize;
-    buffer.setBytes(0, pointBytes);
+    com.esri.core.geometry.ogc.OGCGeometry geom1;
+    geom1 = com.esri.core.geometry.ogc.OGCGeometry
+        .fromBinary(geom1Param.buffer.nioBuffer(geom1Param.start, geom1Param.end - geom1Param.start));
+
+    com.esri.core.geometry.ogc.OGCGeometry envelopeGeom;
+    if (geom1.geometryType().equals("Point")) {
+      out.value = ((com.esri.core.geometry.ogc.OGCPoint) geom1).Y();
+    } else {
+      com.esri.core.geometry.Envelope envelope = new com.esri.core.geometry.Envelope();
+      geom1.getEsriGeometry().queryEnvelope(envelope);
+      out.value = envelope.getYMin();
+    }
   }
 }
diff --git a/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/TestGeometryFunctions.java b/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/TestGeometryFunctions.java
index 8e5ead3..379de5c 100644
--- a/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/TestGeometryFunctions.java
+++ b/contrib/gis/src/test/java/org/apache/drill/exec/expr/fn/impl/gis/TestGeometryFunctions.java
@@ -29,7 +29,6 @@ public class TestGeometryFunctions extends BaseTestQuery {
 
   @Test
   public void testGeometryFromTextCreation() throws Exception {
-
     testBuilder()
     .sqlQuery("select ST_AsText(ST_GeomFromText('" + wktPoint + "')) "
         + "from cp.`sample-data/CA-cities.csv` limit 1")
@@ -41,7 +40,6 @@ public class TestGeometryFunctions extends BaseTestQuery {
 
   @Test
   public void testGeometryPointCreation() throws Exception {
-
     testBuilder()
       .sqlQuery("select ST_AsText(ST_Point(-121.895, 37.339)) "
           + "from cp.`sample-data/CA-cities.csv` limit 1")
@@ -76,6 +74,30 @@ public class TestGeometryFunctions extends BaseTestQuery {
   }
 
   @Test
+  public void testNullWkt() throws Exception {
+    testBuilder()
+      .sqlQuery("select ST_AsText(ST_GeomFromText(columns[4])) " +
+              "from cp.`/sample-data/CA-cities-with-nulls.csv` limit 1")
+      .ordered()
+      .baselineColumns("EXPR$0")
+      .baselineValues(new Object[]{null})
+      .build()
+      .run();
+  }
+
+  @Test
+  public void testNullGeoJSON() throws Exception {
+    testBuilder()
+      .sqlQuery("select ST_AsGeoJSON(ST_GeomFromText(columns[4])) " +
+              "from cp.`/sample-data/CA-cities-with-nulls.csv` limit 1")
+      .ordered()
+      .baselineColumns("EXPR$0")
+      .baselineValues(new Object[]{null})
+      .build()
+      .run();
+  }
+
+  @Test
   public void testGeoJSONCreationFromPoint() throws Exception {
     testBuilder()
       .sqlQuery("select ST_AsGeoJSON(ST_Point(-121.895, 37.339)) "
@@ -86,7 +108,6 @@ public class TestGeometryFunctions extends BaseTestQuery {
       .build()
       .run();
   }
-
   @Test
   public void testGeoJSONCreationFromGeom() throws Exception {
     testBuilder()
@@ -101,7 +122,6 @@ public class TestGeometryFunctions extends BaseTestQuery {
 
   @Test
   public void testSTWithinQuery() throws Exception {
-
     testBuilder()
       .sqlQuery("select ST_Within(ST_Point(columns[4], columns[3]),"
           + "ST_GeomFromText('POLYGON((-121.95 37.28, -121.94 37.35, -121.84 37.35, -121.84 37.28, -121.95 37.28))')"
@@ -123,4 +143,204 @@ public class TestGeometryFunctions extends BaseTestQuery {
     .build()
     .run();
   }
+
+  @Test
+  public void testSTXQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("select ST_X(ST_Point(-121.895, 37.339)) "
+          + "from cp.`/sample-data/CA-cities.csv` limit 1")
+      .ordered().baselineColumns("EXPR$0")
+      .baselineValues(-121.895)
+      .build()
+      .run();
+  }
+
+  @Test
+  public void testSTYQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("select ST_Y(ST_Point(-121.895, 37.339)) "
+          + "from cp.`/sample-data/CA-cities.csv` limit 1")
+      .ordered().baselineColumns("EXPR$0")
+      .baselineValues(37.339)
+      .build()
+      .run();
+  }
+
+  @Test
+  public void testIntersectQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("SELECT ST_Intersects(ST_GeomFromText('POINT(0 0)'), ST_GeomFromText('LINESTRING(2 0,0 2)')) "
+          + "from (VALUES(1))")
+      .ordered().baselineColumns("EXPR$0")
+      .baselineValues(false)
+      .build()
+      .run();
+
+    testBuilder()
+    .sqlQuery("SELECT ST_Intersects(ST_GeomFromText('POINT(0 0)'), ST_GeomFromText('LINESTRING(0 0,0 2)')) "
+        + "from (VALUES(1))")
+    .ordered().baselineColumns("EXPR$0")
+    .baselineValues(true)
+    .build()
+    .run();
+  }
+
+  @Test
+  public void testRelateQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("SELECT ST_Relate(ST_GeomFromText('POINT(1 2)'), ST_Buffer(ST_GeomFromText('POINT(1 2)'),2), '0FFFFF212') "
+          + "from (VALUES(1))")
+      .ordered().baselineColumns("EXPR$0")
+      .baselineValues(true)
+      .build()
+      .run();
+
+    testBuilder()
+    .sqlQuery("SELECT ST_Relate(ST_GeomFromText('POINT(1 2)'), ST_Buffer(ST_GeomFromText('POINT(1 2)'),2), '*FF*FF212') "
+        + "from (VALUES(1))")
+    .ordered().baselineColumns("EXPR$0")
+    .baselineValues(true)
+    .build()
+    .run();
+
+    testBuilder()
+    .sqlQuery("SELECT ST_Relate(ST_GeomFromText('POINT(0 0)'), ST_Buffer(ST_GeomFromText('POINT(1 2)'),2), '*FF*FF212') "
+        + "from (VALUES(1))")
+    .ordered().baselineColumns("EXPR$0")
+    .baselineValues(false)
+    .build()
+    .run();
+  }
+
+  @Test
+  public void testTouchesQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("SELECT ST_Touches(ST_GeomFromText('LINESTRING(0 0, 1 1, 0 2)'), ST_GeomFromText('POINT(1 1)')) "
+          + "from (VALUES(1))")
+      .ordered().baselineColumns("EXPR$0")
+      .baselineValues(false)
+      .build()
+      .run();
+
+    testBuilder()
+    .sqlQuery("SELECT ST_Touches(ST_GeomFromText('LINESTRING(0 0, 1 1, 0 2)'), ST_GeomFromText('POINT(0 2)')) "
+        + "from (VALUES(1))")
+    .ordered().baselineColumns("EXPR$0")
+    .baselineValues(true)
+    .build()
+    .run();
+  }
+
+  @Test
+  public void testEqualsQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("SELECT ST_Equals(ST_GeomFromText('LINESTRING(0 0, 10 10)'), "
+                + "ST_GeomFromText('LINESTRING(0 0, 5 5, 10 10)')) from (VALUES(1))")
+      .ordered().baselineColumns("EXPR$0")
+      .baselineValues(true)
+      .build()
+      .run();
+  }
+
+  @Test
+  public void testContainsQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("SELECT ST_Contains(smallc, bigc) As smallcontainsbig, "
+                     + "ST_Contains(bigc,smallc) As bigcontainssmall, "
+                     + "ST_Contains(bigc, ST_Union(smallc, bigc)) as bigcontainsunion, "
+                     + "ST_Equals(bigc, ST_Union(smallc, bigc)) as bigisunion "
+                + "FROM (SELECT ST_Buffer(ST_GeomFromText('POINT(1 2)'), 10) As smallc, "
+                       + "ST_Buffer(ST_GeomFromText('POINT(1 2)'), 20) As bigc from (VALUES(1)) ) As foo")
+      .ordered().baselineColumns("smallcontainsbig", "bigcontainssmall", "bigcontainsunion", "bigisunion")
+      .baselineValues(false, true, true, true)
+      .build()
+      .run();
+  }
+
+  @Test
+  public void testOverlapsCrossesIntersectsContainsQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("SELECT ST_Overlaps(a,b) As a_overlap_b, "
+                  + "ST_Crosses(a,b) As a_crosses_b, "
+                  + "ST_Intersects(a, b) As a_intersects_b, "
+                  + "ST_Contains(b,a) As b_contains_a "
+                + "FROM (SELECT ST_GeomFromText('POINT(1 0.5)') As a, ST_GeomFromText('LINESTRING(1 0, 1 1, 3 5)')  As b "
+                  + "from (VALUES(1)) ) As foo")
+      .ordered().baselineColumns("a_overlap_b", "a_crosses_b", "a_intersects_b", "b_contains_a")
+      .baselineValues(false, false, true, true)
+      .build()
+      .run();
+  }
+
+  @Test
+  public void testDisjointQuery() throws Exception {
+    testBuilder()
+      .sqlQuery("SELECT ST_Disjoint(ST_GeomFromText('POINT(0 0)'), ST_GeomFromText('LINESTRING( 2 0, 0 2 )')) "
+                + "from (VALUES(1))")
+      .ordered().baselineColumns("EXPR$0")
+      .baselineValues(true)
+      .build()
+      .run();
+
+    testBuilder()
+    .sqlQuery("SELECT ST_Disjoint(ST_GeomFromText('POINT(0 0)'), ST_GeomFromText('LINESTRING( 0 0, 0 2 )')) "
+              + "from (VALUES(1))")
+    .ordered().baselineColumns("EXPR$0")
+    .baselineValues(false)
+    .build()
+    .run();
+  }
+
+  @Test
+  public void testTransformQuery() throws Exception {
+    double targetX = -71.1776848522251;
+    double targetY = 42.3902896512902;
+
+    testBuilder()
+      .sqlQuery("SELECT round(st_x(st_transform(st_geomfromtext('POINT (743238 2967416)'), 2249, 4326)), 13),"
+          + " round(st_y(st_transform(st_geomfromtext('POINT (743238 2967416)'), 2249, 4326)), 13) from (VALUES(1))")
+      .ordered().baselineColumns("EXPR$0", "EXPR$1")
+      .baselineValues(targetX, targetY)
+      .build()
+      .run();
+  }
+
+  @Test
+  public void testUnionAggregateQuery() throws Exception {
+    String targetAll = "MULTIPOLYGON (((0 -1, 1 -1, 1 0, 1 1, 0 1, 0 0, 0 -1)), "
+                        + "((10 9, 11 9, 11 10, 11 11, 10 11, 10 10, 10 9)))";
+    String targetFirstGroup = "POLYGON ((0 -1, 1 -1, 1 0, 1 1, 0 1, 0 0, 0 -1))";
+    String targetSecondGroup = "POLYGON ((10 9, 11 9, 11 10, 11 11, 10 11, 10 10, 10 9))";
+
+    testBuilder()
+      .sqlQuery("select ST_AsText(ST_UnionAggregate(ST_GeomFromText(columns[1]))) from cp.`sample-data/polygons.tsv`")
+      .ordered().baselineColumns("EXPR$0")
+      .baselineValues(targetAll)
+      .build()
+      .run();
+
+    testBuilder()
+      .sqlQuery("select columns[0], ST_AsText(ST_UnionAggregate(ST_GeomFromText(columns[1])))"
+          + " from cp.`sample-data/polygons.tsv` group by columns[0] having columns[0] = '1'")
+      .ordered().baselineColumns("EXPR$0", "EXPR$1")
+      .baselineValues("1", targetFirstGroup)
+      .build()
+      .run();
+
+    testBuilder()
+      .sqlQuery("select columns[0], ST_AsText(ST_UnionAggregate(ST_GeomFromText(columns[1])))"
+          + " from cp.`sample-data/polygons.tsv` group by columns[0] having columns[0] = '2'")
+      .ordered().baselineColumns("EXPR$0", "EXPR$1")
+      .baselineValues("2", targetSecondGroup)
+      .build()
+      .run();
+
+    testBuilder()
+    .sqlQuery("select count(*) from (select columns[0], ST_AsText(ST_UnionAggregate(ST_GeomFromText(columns[1])))"
+        + " from cp.`sample-data/polygons.tsv` group by columns[0])")
+    .ordered().baselineColumns("EXPR$0")
+    .baselineValues(3L)
+    .build()
+    .run();
+  }
 }

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 10/10: DRILL-5584: Add branding and versioning information for windows C++ Client.

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 9908ea035a6a9512ddf005fc402b07a7101243db
Author: Rob Wu <ro...@gmail.com>
AuthorDate: Wed Nov 15 18:21:48 2017 -0800

    DRILL-5584: Add branding and versioning information for windows C++ Client.
    
    This closes #5584
---
 contrib/native/client/src/clientlib/CMakeLists.txt | 11 +++-
 contrib/native/client/src/clientlib/env.h.in       |  7 ++-
 contrib/native/client/src/clientlib/version.rc.in  | 68 ++++++++++++++++++++++
 3 files changed, 84 insertions(+), 2 deletions(-)

diff --git a/contrib/native/client/src/clientlib/CMakeLists.txt b/contrib/native/client/src/clientlib/CMakeLists.txt
index e793e66..90cfe8b 100644
--- a/contrib/native/client/src/clientlib/CMakeLists.txt
+++ b/contrib/native/client/src/clientlib/CMakeLists.txt
@@ -16,6 +16,13 @@
 # limitations under the License.
 #
 
+if(MSVC)
+    configure_file(
+      ${CMAKE_CURRENT_SOURCE_DIR}/version.rc.in
+      ${CMAKE_CURRENT_BINARY_DIR}/version.rc
+      @ONLY)
+endif()
+
 # Drill Client library
 
 set (CLIENTLIB_SRC_FILES
@@ -51,7 +58,9 @@ set_property(
 
 if(MSVC)
     add_definitions(-DDRILL_CLIENT_EXPORTS -D_SCL_SECURE_NO_WARNINGS)
+    add_library(drillClient SHARED ${CLIENTLIB_SRC_FILES} ${CMAKE_CURRENT_BINARY_DIR}/version.rc)
+else()
+    add_library(drillClient SHARED ${CLIENTLIB_SRC_FILES})
 endif()
 
-add_library(drillClient SHARED ${CLIENTLIB_SRC_FILES} )
 target_link_libraries(drillClient ${Boost_LIBRARIES} ${PROTOBUF_LIBRARY} ${Zookeeper_LIBRARIES} ${SASL_LIBRARIES} ${OPENSSL_LIBRARIES} protomsgs y2038)
diff --git a/contrib/native/client/src/clientlib/env.h.in b/contrib/native/client/src/clientlib/env.h.in
index 380746a..794b628 100644
--- a/contrib/native/client/src/clientlib/env.h.in
+++ b/contrib/native/client/src/clientlib/env.h.in
@@ -19,7 +19,7 @@
 #define ENV_H
 
 #define DRILL_NAME              "Apache Drill"
-#define DRILL_CONNECTOR_NAME    "Apache Drill C++ client"
+#define DRILL_CONNECTOR_NAME    "Apache Drill C++ Client"
 #define DRILL_VERSION_STRING    "@PROJECT_VERSION@"
 
 #define DRILL_VERSION_MAJOR @PROJECT_VERSION_MAJOR@
@@ -29,6 +29,11 @@
 #define GIT_SHA_PROP  @GIT_SHA_PROP@
 #define GIT_COMMIT_PROP @GIT_COMMIT_PROP@
 
+#define DRILL_LEGALCOPYRIGHT_STR    "Copyright (c) 2013-2017 The Apache Software Foundation\0"
+#define DRILL_PRODUCTNAME_STR       DRILL_CONNECTOR_NAME "\0"
+#define DRILL_PRODUCTVERSION_STR    DRILL_VERSION_STRING ".0\0"
+#define DRILL_INTERNALNAME_STR      "drillClient.dll\0"
+
 #endif
 
 
diff --git a/contrib/native/client/src/clientlib/version.rc.in b/contrib/native/client/src/clientlib/version.rc.in
new file mode 100644
index 0000000..c013261
--- /dev/null
+++ b/contrib/native/client/src/clientlib/version.rc.in
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "env.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated resource.
+//
+#include "afxres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (US) resources.
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION @PROJECT_VERSION_MAJOR@,@PROJECT_VERSION_MINOR@,@PROJECT_VERSION_PATCH@,0
+ PRODUCTVERSION @PROJECT_VERSION_MAJOR@,@PROJECT_VERSION_MINOR@,@PROJECT_VERSION_PATCH@,0
+ FILEFLAGSMASK 0x3fL 
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x0L
+ FILESUBTYPE 0x0L
+BEGIN
+    BLOCK "StringFileInfo"
+    BEGIN
+        BLOCK "040904b0"
+        BEGIN
+            VALUE "CompanyName", "\0"
+            VALUE "FileDescription", DRILL_PRODUCTNAME_STR
+            VALUE "FileVersion", DRILL_PRODUCTVERSION_STR
+            VALUE "LegalCopyright", DRILL_LEGALCOPYRIGHT_STR
+            VALUE "ProductName", DRILL_PRODUCTNAME_STR
+            VALUE "ProductVersion", DRILL_PRODUCTVERSION_STR
+            VALUE "InternalName", DRILL_INTERNALNAME_STR
+            VALUE "OriginalFilename", DRILL_INTERNALNAME_STR
+            VALUE "LegalTrademarks", "\0"
+        END
+    END
+    BLOCK "VarFileInfo"
+    BEGIN
+        VALUE "Translation", 0x409, 1200
+    END
+END
\ No newline at end of file

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 02/10: DRILL-4364: Image Metadata Format Plugin - Initial commit of Image Metadata Format Plugin - See https://issues.apache.org/jira/browse/DRILL-4364

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 04a532d2d8790d69214adbb4a8247f8a382cfd08
Author: Akihiko Kusanagi <ak...@mapr.com>
AuthorDate: Sun Feb 7 03:55:57 2016 +0900

    DRILL-4364: Image Metadata Format Plugin - Initial commit of Image Metadata Format Plugin - See https://issues.apache.org/jira/browse/DRILL-4364
    
    This closes #367
---
 exec/java-exec/pom.xml                             |   5 +
 .../store/image/GenericMetadataDescriptor.java     |  89 ++++
 .../exec/store/image/GenericMetadataDirectory.java | 315 +++++++++++++
 .../exec/store/image/GenericMetadataReader.java    | 412 +++++++++++++++++
 .../drill/exec/store/image/ImageFormatConfig.java  |  97 ++++
 .../drill/exec/store/image/ImageFormatPlugin.java  |  82 ++++
 .../drill/exec/store/image/ImageRecordReader.java  | 493 +++++++++++++++++++++
 .../main/resources/bootstrap-storage-plugins.json  |  18 +
 .../store/dfs/TestFormatPluginOptionExtractor.java |   7 +
 .../exec/store/image/TestImageRecordReader.java    | 128 ++++++
 .../src/test/resources/store/image/1_webp_a.webp   | Bin 0 -> 23404 bytes
 .../src/test/resources/store/image/adobeJpeg1.eps  | Bin 0 -> 99569 bytes
 .../src/test/resources/store/image/avi.json        |  32 ++
 .../src/test/resources/store/image/bmp.json        |  36 ++
 .../src/test/resources/store/image/eps.json        | 116 +++++
 .../src/test/resources/store/image/gif.json        |  47 ++
 .../src/test/resources/store/image/ico.json        |  33 ++
 .../src/test/resources/store/image/jpeg.json       | 213 +++++++++
 .../src/test/resources/store/image/mov.json        |  67 +++
 .../src/test/resources/store/image/mp4.json        |  56 +++
 .../src/test/resources/store/image/pcx.json        |  37 ++
 .../src/test/resources/store/image/png.json        |  57 +++
 .../src/test/resources/store/image/psd.json        | 119 +++++
 .../store/image/rose-128x174-24bit-lzw.tiff        | Bin 0 -> 50476 bytes
 .../resources/store/image/rose-128x174-24bit.bmp   | Bin 0 -> 66872 bytes
 .../resources/store/image/rose-128x174-24bit.pcx   | Bin 0 -> 34864 bytes
 .../store/image/rose-128x174-32bit-alpha.png       | Bin 0 -> 26308 bytes
 .../store/image/rose-128x174-32bit-alpha.psd       | Bin 0 -> 102618 bytes
 .../store/image/rose-128x174-8bit-alpha.gif        | Bin 0 -> 10463 bytes
 .../store/image/rose-32x32-32bit-alpha.ico         | Bin 0 -> 4286 bytes
 .../src/test/resources/store/image/sample.avi      | Bin 0 -> 375688 bytes
 .../src/test/resources/store/image/sample.mov      | Bin 0 -> 469690 bytes
 .../src/test/resources/store/image/sample.mp4      | Bin 0 -> 383631 bytes
 .../src/test/resources/store/image/sample.wav      | Bin 0 -> 37534 bytes
 .../src/test/resources/store/image/tiff.json       |  87 ++++
 .../src/test/resources/store/image/wav.json        |  32 ++
 .../src/test/resources/store/image/webp.json       |  29 ++
 .../test/resources/store/image/withExifAndIptc.jpg | Bin 0 -> 44606 bytes
 exec/jdbc-all/pom.xml                              |   4 +
 39 files changed, 2611 insertions(+)

diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 345e240..0d03cc8 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -632,6 +632,11 @@
         </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>com.drewnoakes</groupId>
+      <artifactId>metadata-extractor</artifactId>
+      <version>2.11.0</version>
+    </dependency>
   </dependencies>
 
   <profiles>
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataDescriptor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataDescriptor.java
new file mode 100644
index 0000000..82d42fd
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataDescriptor.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.image;
+
+import com.drew.lang.annotations.NotNull;
+import com.drew.lang.annotations.Nullable;
+import com.drew.metadata.TagDescriptor;
+
+import static org.apache.drill.exec.store.image.GenericMetadataDirectory.TAG_FILE_SIZE;
+import static org.apache.drill.exec.store.image.GenericMetadataDirectory.TAG_ORIENTATION;
+import static org.apache.drill.exec.store.image.GenericMetadataDirectory.TAG_DURATION;
+
+@SuppressWarnings("WeakerAccess")
+public class GenericMetadataDescriptor extends TagDescriptor<GenericMetadataDirectory>
+{
+  public GenericMetadataDescriptor(@NotNull GenericMetadataDirectory directory)
+  {
+    super(directory);
+  }
+
+  @Override
+  @Nullable
+  public String getDescription(int tagType)
+  {
+    switch (tagType) {
+      case TAG_FILE_SIZE:
+        return getFileSizeDescription();
+      case TAG_ORIENTATION:
+        return getOrientationDescription();
+      case TAG_DURATION:
+        return getDurationDescription();
+      default:
+        return super.getDescription(tagType);
+    }
+  }
+
+  @Nullable
+  private String getFileSizeDescription()
+  {
+    Long size = _directory.getLongObject(TAG_FILE_SIZE);
+
+    if (size == null) {
+      return null;
+    }
+    return Long.toString(size) + " bytes";
+  }
+
+  @Nullable
+  private String getOrientationDescription() {
+    return getIndexedDescription(TAG_ORIENTATION, 1,
+        "Top, left side (Horizontal / normal)",
+        "Top, right side (Mirror horizontal)",
+        "Bottom, right side (Rotate 180)",
+        "Bottom, left side (Mirror vertical)",
+        "Left side, top (Mirror horizontal and rotate 270 CW)",
+        "Right side, top (Rotate 90 CW)",
+        "Right side, bottom (Mirror horizontal and rotate 90 CW)",
+        "Left side, bottom (Rotate 270 CW)");
+  }
+
+  @Nullable
+  private String getDurationDescription() {
+    Long value = _directory.getLongObject(TAG_DURATION);
+    if (value == null) {
+      return null;
+    }
+
+    Integer hours = (int)(value / (Math.pow(60, 2)));
+    Integer minutes = (int)((value / (Math.pow(60, 1))) - (hours * 60));
+    Integer seconds = (int)Math.ceil((value / (Math.pow(60, 0))) - (minutes * 60));
+    return String.format("%1$02d:%2$02d:%3$02d", hours, minutes, seconds);
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataDirectory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataDirectory.java
new file mode 100644
index 0000000..871a11b
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataDirectory.java
@@ -0,0 +1,315 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.image;
+
+import com.drew.lang.annotations.NotNull;
+import com.drew.metadata.Directory;
+import com.drew.metadata.MetadataException;
+
+import java.util.HashMap;
+
+@SuppressWarnings("WeakerAccess")
+public class GenericMetadataDirectory extends Directory
+{
+  public static final int TAG_FILE_SIZE = 1;
+  public static final int TAG_FILE_DATE_TIME = 2;
+  public static final int TAG_FORMAT = 3;
+  public static final int TAG_PIXEL_WIDTH = 4;
+  public static final int TAG_PIXEL_HEIGHT = 5;
+  public static final int TAG_ORIENTATION = 6;
+  public static final int TAG_DPI_WIDTH = 7;
+  public static final int TAG_DPI_HEIGHT = 8;
+  public static final int TAG_COLOR_MODE = 9;
+  public static final int TAG_BITS_PER_PIXEL = 10;
+  public static final int TAG_HAS_ALPHA = 11;
+  public static final int TAG_DURATION = 12;
+  public static final int TAG_VIDEO_CODEC = 13;
+  public static final int TAG_FRAME_RATE = 14;
+  public static final int TAG_AUDIO_CODEC = 15;
+  public static final int TAG_AUDIO_SAMPLE_SIZE = 16;
+  public static final int TAG_AUDIO_SAMPLE_RATE = 17;
+
+  @NotNull
+  protected static final HashMap<Integer, String> _tagNameMap = new HashMap<>();
+
+  static {
+    _tagNameMap.put(TAG_FILE_SIZE, "File Size");
+    _tagNameMap.put(TAG_FILE_DATE_TIME, "File Date Time");
+    _tagNameMap.put(TAG_FORMAT, "Format");
+    _tagNameMap.put(TAG_PIXEL_WIDTH, "Pixel Width");
+    _tagNameMap.put(TAG_PIXEL_HEIGHT, "Pixel Height");
+    _tagNameMap.put(TAG_ORIENTATION, "Orientaion");
+    _tagNameMap.put(TAG_DPI_WIDTH, "DPI Width");
+    _tagNameMap.put(TAG_DPI_HEIGHT, "DPI Height");
+    _tagNameMap.put(TAG_COLOR_MODE, "Color Mode");
+    _tagNameMap.put(TAG_BITS_PER_PIXEL, "Bits Per Pixel");
+    _tagNameMap.put(TAG_HAS_ALPHA, "Has Alpha");
+    _tagNameMap.put(TAG_DURATION, "Duration");
+    _tagNameMap.put(TAG_VIDEO_CODEC, "Video Codec");
+    _tagNameMap.put(TAG_FRAME_RATE, "Frame Rate");
+    _tagNameMap.put(TAG_AUDIO_CODEC, "Audio Codec");
+    _tagNameMap.put(TAG_AUDIO_SAMPLE_SIZE, "Audio Sample Size");
+    _tagNameMap.put(TAG_AUDIO_SAMPLE_RATE, "Audio Sample Rate");
+  }
+
+  public GenericMetadataDirectory()
+  {
+    this.setDescriptor(new GenericMetadataDescriptor(this));
+  }
+
+  @Override
+  @NotNull
+  public String getName()
+  {
+    return "Generic Metadata";
+  }
+
+  @Override
+  @NotNull
+  protected HashMap<Integer, String> getTagNameMap()
+  {
+    return _tagNameMap;
+  }
+
+  private void setIntIfEmpty(int tagType, int value) {
+    if (!containsTag(tagType)) {
+      setInt(tagType, value);
+    }
+  }
+
+  private void setLongIfEmpty(int tagType, long value) {
+    if (!containsTag(tagType)) {
+      setLong(tagType, value);
+    }
+  }
+
+  private void setBooleanIfEmpty(int tagType, boolean value) {
+    if (!containsTag(tagType)) {
+      setBoolean(tagType, value);
+    }
+  }
+
+  private void setDoubleIfEmpty(int tagType, double value) {
+    if (!containsTag(tagType)) {
+      setDouble(tagType, value);
+    }
+  }
+
+  private void setStringIfEmpty(int tagType, @NotNull String value) {
+    if (!containsTag(tagType)) {
+      setString(tagType, value);
+    }
+  }
+
+  public void setPixelWidth(int pixelWidth) {
+    setIntIfEmpty(TAG_PIXEL_WIDTH, pixelWidth);
+  }
+
+  public void setPixelWidth(Directory directory, int tagType) {
+    try {
+      setPixelWidth(directory.getInt(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setPixelHeight(int pixelHeight) {
+    setIntIfEmpty(TAG_PIXEL_HEIGHT, pixelHeight);
+  }
+
+  public void setPixelHeight(Directory directory, int tagType) {
+    try {
+      setPixelHeight(directory.getInt(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setOrientation(int orientation) {
+    setIntIfEmpty(TAG_ORIENTATION, orientation);
+  }
+
+  public void setOrientation(Directory directory, int tagType) {
+    try {
+      setOrientation(directory.getInt(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setDPIWidth(double dpiWidth) {
+    setDoubleIfEmpty(TAG_DPI_WIDTH, dpiWidth);
+  }
+
+  public void setDPIWidth(Directory directory, int tagType) {
+    try {
+      setDPIWidth(directory.getInt(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setDPIWidth(Directory directory, int tagType, double factor) {
+    try {
+      setDPIWidth(directory.getInt(tagType) * factor);
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setDPIHeight(double dpiHeight) {
+    setDoubleIfEmpty(TAG_DPI_HEIGHT, dpiHeight);
+  }
+
+  public void setDPIHeight(Directory directory, int tagType) {
+    try {
+      setDPIHeight(directory.getInt(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setDPIHeight(Directory directory, int tagType, double factor) {
+    try {
+      setDPIHeight(directory.getInt(tagType) * factor);
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setColorMode(String colorMode) {
+    setStringIfEmpty(TAG_COLOR_MODE, colorMode);
+  }
+
+  public void setColorMode(Directory directory, int tagType) {
+    String colorMode = directory.getDescription(tagType);
+    if (colorMode != null) {
+      setColorMode(colorMode);
+    }
+  }
+
+  public void setBitPerPixel(int bitPerPixel) {
+    setIntIfEmpty(TAG_BITS_PER_PIXEL, bitPerPixel);
+  }
+
+  public void setBitPerPixel(Directory directory, int tagType) {
+    try {
+      setBitPerPixel(directory.getInt(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setBitPerPixel(Directory directory, int tagType1, int tagType2) {
+    try {
+      setBitPerPixel(directory.getInt(tagType1) * directory.getInt(tagType2));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setAlpha(boolean alpha) {
+    setBooleanIfEmpty(TAG_HAS_ALPHA, alpha);
+  }
+
+  public void setAlpha(Directory directory, int tagType) {
+    try {
+      setAlpha(directory.getBoolean(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setDuration(long duration) {
+    setLongIfEmpty(TAG_DURATION, duration);
+  }
+
+  public void setDuration(Directory directory, int tagType) {
+    Object o = directory.getObject(tagType);
+    if (o != null) {
+      if (o instanceof String) {
+        String[] time = ((String) o).split(":");
+        setDuration(
+          Long.parseLong(time[0]) * 3600 +
+          Long.parseLong(time[1]) * 60 +
+          Long.parseLong(time[2]));
+      } else if (o instanceof Number) {
+        setDuration(((Number) o).longValue());
+      }
+    }
+  }
+
+  public void setVideoCodec(String videoCodec) {
+    setStringIfEmpty(TAG_VIDEO_CODEC, videoCodec);
+  }
+
+  public void setVideoCodec(Directory directory, int tagType) {
+    String videoCodec = directory.getString(tagType);
+    if (videoCodec != null) {
+      setVideoCodec(videoCodec);
+    }
+  }
+
+  public void setFrameRate(double frameRate) {
+    setDoubleIfEmpty(TAG_FRAME_RATE, frameRate);
+  }
+
+  public void setFrameRate(Directory directory, int tagType) {
+    try {
+      setFrameRate(directory.getDouble(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setAudioCodec(String audioCodec) {
+    setStringIfEmpty(TAG_AUDIO_CODEC, audioCodec);
+  }
+
+  public void setAudioCodec(Directory directory, int tagType) {
+    String audioCodec = directory.getString(tagType);
+    if (audioCodec != null) {
+      setAudioCodec(audioCodec);
+    }
+  }
+
+  public void setAudioSampleSize(int audioSampleSize) {
+    setIntIfEmpty(TAG_AUDIO_SAMPLE_SIZE, audioSampleSize);
+  }
+
+  public void setAudioSampleSize(Directory directory, int tagType) {
+    try {
+      setAudioSampleSize(directory.getInt(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+
+  public void setAudioSampleRate(double audioSampleRate) {
+    setDoubleIfEmpty(TAG_AUDIO_SAMPLE_RATE, audioSampleRate);
+  }
+
+  public void setAudioSampleRate(Directory directory, int tagType) {
+    try {
+      setAudioSampleRate(directory.getDouble(tagType));
+    } catch (MetadataException e) {
+      // Nothing needs to be done
+    }
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataReader.java
new file mode 100644
index 0000000..cec677d
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/GenericMetadataReader.java
@@ -0,0 +1,412 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.image;
+
+import com.drew.imaging.FileType;
+import com.drew.imaging.png.PngChunkType;
+import com.drew.imaging.png.PngColorType;
+import com.drew.lang.annotations.NotNull;
+import com.drew.metadata.Directory;
+import com.drew.metadata.Metadata;
+import com.drew.metadata.MetadataException;
+import com.drew.metadata.avi.AviDirectory;
+import com.drew.metadata.bmp.BmpHeaderDirectory;
+import com.drew.metadata.eps.EpsDirectory;
+import com.drew.metadata.exif.ExifIFD0Directory;
+import com.drew.metadata.exif.ExifSubIFDDirectory;
+import com.drew.metadata.gif.GifControlDirectory;
+import com.drew.metadata.gif.GifHeaderDirectory;
+import com.drew.metadata.ico.IcoDirectory;
+import com.drew.metadata.jfif.JfifDirectory;
+import com.drew.metadata.jpeg.JpegDirectory;
+import com.drew.metadata.mov.QuickTimeDirectory;
+import com.drew.metadata.mov.media.QuickTimeSoundDirectory;
+import com.drew.metadata.mov.media.QuickTimeVideoDirectory;
+import com.drew.metadata.mp4.Mp4Directory;
+import com.drew.metadata.mp4.media.Mp4SoundDirectory;
+import com.drew.metadata.mp4.media.Mp4VideoDirectory;
+import com.drew.metadata.pcx.PcxDirectory;
+import com.drew.metadata.photoshop.PsdHeaderDirectory;
+import com.drew.metadata.png.PngDirectory;
+import com.drew.metadata.wav.WavDirectory;
+import com.drew.metadata.webp.WebpDirectory;
+import org.apache.hadoop.fs.FileStatus;
+
+import java.util.Date;
+import java.util.TimeZone;
+
+public class GenericMetadataReader
+{
+  public void read(@NotNull FileType fileType, @NotNull FileStatus fileStatus, @NotNull Metadata metadata)
+  {
+    GenericMetadataDirectory directory = new GenericMetadataDirectory();
+    boolean skipEPSPreview = false;
+
+    directory.setLong(GenericMetadataDirectory.TAG_FILE_SIZE, fileStatus.getLen());
+    // Add local time zone offset to store the last modified time as local time
+    // just like TO_TIMESTAMP(UNIX_TIMESTAMP()) returns local time
+    directory.setDate(GenericMetadataDirectory.TAG_FILE_DATE_TIME,
+        new Date(fileStatus.getModificationTime() + TimeZone.getDefault().getRawOffset()));
+    directory.setString(GenericMetadataDirectory.TAG_FORMAT, fileType.name().toUpperCase());
+
+    for (Directory dir : metadata.getDirectories()) {
+
+      if (dir instanceof JpegDirectory) {
+        final JpegDirectory jpegDir = (JpegDirectory)dir;
+        directory.setPixelWidth(jpegDir, JpegDirectory.TAG_IMAGE_WIDTH);
+        directory.setPixelHeight(jpegDir, JpegDirectory.TAG_IMAGE_HEIGHT);
+        directory.setBitPerPixel(jpegDir, JpegDirectory.TAG_DATA_PRECISION, JpegDirectory.TAG_NUMBER_OF_COMPONENTS);
+        continue;
+      }
+
+      if (dir instanceof JfifDirectory) {
+        final JfifDirectory jfifDir = (JfifDirectory)dir;
+        try {
+          final int unit = jfifDir.getResUnits();
+          if (unit == 1 || unit == 2) {
+            directory.setDPIWidth(jfifDir, JfifDirectory.TAG_RESX, unit == 1 ? 1.0 : 2.54);
+            directory.setDPIHeight(jfifDir, JfifDirectory.TAG_RESY, unit == 1 ? 1.0 : 2.54);
+          }
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        continue;
+      }
+
+      if (dir instanceof ExifIFD0Directory) {
+        if (skipEPSPreview) {
+          skipEPSPreview = false;
+          continue;
+        }
+
+        final ExifIFD0Directory ifd0Dir = (ExifIFD0Directory)dir;
+        directory.setPixelWidth(ifd0Dir, ExifIFD0Directory.TAG_IMAGE_WIDTH);
+        directory.setPixelHeight(ifd0Dir, ExifIFD0Directory.TAG_IMAGE_HEIGHT);
+        directory.setOrientation(ifd0Dir, ExifIFD0Directory.TAG_ORIENTATION);
+        try {
+          final int unit = ifd0Dir.getInt(ExifIFD0Directory.TAG_RESOLUTION_UNIT);
+          if (unit == 2 || unit == 3) {
+            directory.setDPIWidth(ifd0Dir, ExifIFD0Directory.TAG_X_RESOLUTION, unit == 2 ? 1.0 : 2.54);
+            directory.setDPIHeight(ifd0Dir, ExifIFD0Directory.TAG_Y_RESOLUTION, unit == 2 ? 1.0 : 2.54);
+          }
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        int[] bitPerSample = ifd0Dir.getIntArray(ExifIFD0Directory.TAG_BITS_PER_SAMPLE);
+        if (bitPerSample != null) {
+          int bitsPerPixel = 0;
+          for (int n : bitPerSample) {
+            bitsPerPixel += n;
+          }
+          directory.setBitPerPixel(bitsPerPixel);
+        }
+        continue;
+      }
+
+      if (dir instanceof ExifSubIFDDirectory) {
+        final ExifSubIFDDirectory subIFDDir = (ExifSubIFDDirectory)dir;
+        directory.setPixelWidth(subIFDDir, ExifSubIFDDirectory.TAG_EXIF_IMAGE_WIDTH);
+        directory.setPixelHeight(subIFDDir, ExifSubIFDDirectory.TAG_EXIF_IMAGE_HEIGHT);
+        continue;
+      }
+
+      if (dir instanceof PsdHeaderDirectory) {
+        final PsdHeaderDirectory psdDir = (PsdHeaderDirectory)dir;
+        directory.setPixelWidth(psdDir, PsdHeaderDirectory.TAG_IMAGE_WIDTH);
+        directory.setPixelHeight(psdDir, PsdHeaderDirectory.TAG_IMAGE_HEIGHT);
+        directory.setBitPerPixel(
+            psdDir, PsdHeaderDirectory.TAG_BITS_PER_CHANNEL, PsdHeaderDirectory.TAG_CHANNEL_COUNT);
+        directory.setColorMode(psdDir, PsdHeaderDirectory.TAG_COLOR_MODE);
+        continue;
+      }
+
+      if (dir instanceof PngDirectory) {
+        final PngDirectory pngDir = (PngDirectory)dir;
+
+        if (pngDir.getPngChunkType() == PngChunkType.IHDR) {
+          directory.setPixelWidth(pngDir, PngDirectory.TAG_IMAGE_WIDTH);
+          directory.setPixelHeight(pngDir, PngDirectory.TAG_IMAGE_HEIGHT);
+          try {
+            int numOfComponent = 1;
+            int colorType = pngDir.getInt(PngDirectory.TAG_COLOR_TYPE);
+            if (colorType == PngColorType.IndexedColor.getNumericValue()) {
+              directory.setColorMode("Indexed");
+            } else if (colorType == PngColorType.Greyscale.getNumericValue()) {
+              directory.setColorMode("Grayscale");
+            } else if (colorType == PngColorType.GreyscaleWithAlpha.getNumericValue()) {
+              numOfComponent = 2;
+              directory.setColorMode("Grayscale");
+              directory.setAlpha(true);
+            } else if (colorType == PngColorType.TrueColor.getNumericValue()) {
+              numOfComponent = 3;
+            } else if (colorType == PngColorType.TrueColorWithAlpha.getNumericValue()) {
+              numOfComponent = 4;
+              directory.setAlpha(true);
+            }
+            directory.setBitPerPixel(pngDir.getInt(PngDirectory.TAG_BITS_PER_SAMPLE) * numOfComponent);
+          } catch (MetadataException e) {
+            // Nothing needs to be done
+          }
+          continue;
+        }
+
+        if (pngDir.getPngChunkType() == PngChunkType.pHYs) {
+          try {
+            final int unit = pngDir.getInt(PngDirectory.TAG_UNIT_SPECIFIER);
+            if (unit == 1) {
+              directory.setDPIWidth(pngDir, PngDirectory.TAG_PIXELS_PER_UNIT_X, 0.0254);
+              directory.setDPIHeight(pngDir, PngDirectory.TAG_PIXELS_PER_UNIT_Y, 0.0254);
+            }
+          } catch (MetadataException e) {
+            // Nothing needs to be done
+          }
+          continue;
+        }
+
+        if (pngDir.getPngChunkType() == PngChunkType.tRNS) {
+          directory.setAlpha(true);
+          continue;
+        }
+
+        continue;
+      }
+
+      if (dir instanceof BmpHeaderDirectory) {
+        final BmpHeaderDirectory bmpDir = (BmpHeaderDirectory)dir;
+        directory.setPixelWidth(bmpDir, BmpHeaderDirectory.TAG_IMAGE_WIDTH);
+        directory.setPixelHeight(bmpDir, BmpHeaderDirectory.TAG_IMAGE_HEIGHT);
+        directory.setDPIWidth(bmpDir, BmpHeaderDirectory.TAG_X_PIXELS_PER_METER, 0.0254);
+        directory.setDPIHeight(bmpDir, BmpHeaderDirectory.TAG_Y_PIXELS_PER_METER, 0.0254);
+        try {
+          final int bitsPerPixel = bmpDir.getInt(BmpHeaderDirectory.TAG_BITS_PER_PIXEL);
+          if (bitsPerPixel <= 8) {
+            directory.setColorMode("Indexed");
+          }
+          directory.setBitPerPixel(bitsPerPixel);
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        continue;
+      }
+
+      if (dir instanceof GifHeaderDirectory) {
+        final GifHeaderDirectory gifDir = (GifHeaderDirectory)dir;
+        directory.setPixelWidth(gifDir, GifHeaderDirectory.TAG_IMAGE_WIDTH);
+        directory.setPixelHeight(gifDir, GifHeaderDirectory.TAG_IMAGE_HEIGHT);
+        directory.setColorMode("Indexed");
+        directory.setBitPerPixel(gifDir, GifHeaderDirectory.TAG_BITS_PER_PIXEL);
+        continue;
+      }
+
+      if (dir instanceof GifControlDirectory) {
+        final GifControlDirectory gifControlDir = (GifControlDirectory)dir;
+        directory.setAlpha(gifControlDir, GifControlDirectory.TAG_TRANSPARENT_COLOR_FLAG);
+        continue;
+      }
+
+      if (dir instanceof IcoDirectory) {
+        final IcoDirectory icoDir = (IcoDirectory)dir;
+        directory.setPixelWidth(icoDir, IcoDirectory.TAG_IMAGE_WIDTH);
+        directory.setPixelHeight(icoDir, IcoDirectory.TAG_IMAGE_HEIGHT);
+        try {
+          if (icoDir.getInt(IcoDirectory.TAG_COLOUR_PALETTE_SIZE) != 0) {
+            directory.setColorMode("Indexed");
+          }
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        directory.setBitPerPixel(icoDir, IcoDirectory.TAG_BITS_PER_PIXEL);
+        directory.setAlpha(true);
+        continue;
+      }
+
+      if (dir instanceof PcxDirectory) {
+        final PcxDirectory pcxDir = (PcxDirectory)dir;
+        try {
+          directory.setPixelWidth(pcxDir.getInt(PcxDirectory.TAG_XMAX) - pcxDir.getInt(PcxDirectory.TAG_XMIN) + 1);
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        try {
+          directory.setPixelHeight(pcxDir.getInt(PcxDirectory.TAG_YMAX) - pcxDir.getInt(PcxDirectory.TAG_YMIN) + 1);
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        directory.setDPIWidth(pcxDir, PcxDirectory.TAG_HORIZONTAL_DPI);
+        directory.setDPIHeight(pcxDir, PcxDirectory.TAG_VERTICAL_DPI);
+        directory.setBitPerPixel(pcxDir, PcxDirectory.TAG_BITS_PER_PIXEL, PcxDirectory.TAG_COLOR_PLANES);
+        try {
+          int colorPlanes = pcxDir.getInt(PcxDirectory.TAG_COLOR_PLANES);
+          if (colorPlanes == 1) {
+            if (pcxDir.getInt(PcxDirectory.TAG_PALETTE_TYPE) == 2) {
+              directory.setColorMode("Grayscale");
+            } else {
+              directory.setColorMode("Indexed");
+            }
+          }
+          directory.setAlpha(colorPlanes == 4);
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        continue;
+      }
+
+      if (dir instanceof WavDirectory) {
+        final WavDirectory wavDir = (WavDirectory)dir;
+        directory.setColorMode("N/A");
+        directory.setDuration(wavDir, WavDirectory.TAG_DURATION);
+        directory.setAudioCodec(wavDir, WavDirectory.TAG_FORMAT);
+        directory.setAudioSampleSize(wavDir, WavDirectory.TAG_BITS_PER_SAMPLE);
+        directory.setAudioSampleRate(wavDir, WavDirectory.TAG_SAMPLES_PER_SEC);
+      }
+
+      if (dir instanceof AviDirectory) {
+        final AviDirectory aviDir = (AviDirectory)dir;
+        directory.setPixelWidth(aviDir, AviDirectory.TAG_WIDTH);
+        directory.setPixelHeight(aviDir, AviDirectory.TAG_HEIGHT);
+        directory.setDuration(aviDir, AviDirectory.TAG_DURATION);
+        directory.setVideoCodec(aviDir, AviDirectory.TAG_VIDEO_CODEC);
+        directory.setFrameRate(aviDir, AviDirectory.TAG_FRAMES_PER_SECOND);
+        directory.setAudioCodec(aviDir, AviDirectory.TAG_AUDIO_CODEC);
+        directory.setAudioSampleRate(aviDir, AviDirectory.TAG_SAMPLES_PER_SECOND);
+        continue;
+      }
+
+      if (dir instanceof WebpDirectory) {
+        final WebpDirectory webpDir = (WebpDirectory)dir;
+        directory.setPixelWidth(webpDir, WebpDirectory.TAG_IMAGE_WIDTH);
+        directory.setPixelHeight(webpDir, WebpDirectory.TAG_IMAGE_HEIGHT);
+        directory.setAlpha(webpDir, WebpDirectory.TAG_HAS_ALPHA);
+        continue;
+      }
+
+      if (dir instanceof QuickTimeVideoDirectory) {
+        final QuickTimeVideoDirectory qtVideoDir = (QuickTimeVideoDirectory)dir;
+        directory.setPixelWidth(qtVideoDir, QuickTimeVideoDirectory.TAG_WIDTH);
+        directory.setPixelHeight(qtVideoDir, QuickTimeVideoDirectory.TAG_HEIGHT);
+        directory.setDPIWidth(qtVideoDir, QuickTimeVideoDirectory.TAG_HORIZONTAL_RESOLUTION);
+        directory.setDPIHeight(qtVideoDir, QuickTimeVideoDirectory.TAG_VERTICAL_RESOLUTION);
+        try {
+          int bitsPerPixel = qtVideoDir.getInt(QuickTimeVideoDirectory.TAG_DEPTH) % 32;
+          directory.setBitPerPixel(bitsPerPixel);
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        directory.setDuration(qtVideoDir, QuickTimeVideoDirectory.TAG_DURATION);
+        directory.setVideoCodec(qtVideoDir, QuickTimeVideoDirectory.TAG_COMPRESSION_TYPE);
+        directory.setFrameRate(qtVideoDir, QuickTimeVideoDirectory.TAG_FRAME_RATE);
+        continue;
+      }
+
+      if (dir instanceof QuickTimeSoundDirectory) {
+        final QuickTimeSoundDirectory qtSoundDir = (QuickTimeSoundDirectory)dir;
+        directory.setAudioCodec(qtSoundDir, QuickTimeSoundDirectory.TAG_AUDIO_FORMAT);
+        directory.setAudioSampleSize(qtSoundDir, QuickTimeSoundDirectory.TAG_AUDIO_SAMPLE_SIZE);
+        directory.setAudioSampleRate(qtSoundDir, QuickTimeSoundDirectory.TAG_AUDIO_SAMPLE_RATE);
+        continue;
+      }
+
+      if (dir instanceof QuickTimeDirectory) {
+        final QuickTimeDirectory qtDir = (QuickTimeDirectory)dir;
+        directory.setDuration(qtDir, QuickTimeDirectory.TAG_DURATION);
+        continue;
+      }
+
+     if (dir instanceof Mp4VideoDirectory) {
+        final Mp4VideoDirectory mp4VideoDir = (Mp4VideoDirectory)dir;
+        directory.setPixelWidth(mp4VideoDir, Mp4VideoDirectory.TAG_WIDTH);
+        directory.setPixelHeight(mp4VideoDir, Mp4VideoDirectory.TAG_HEIGHT);
+        directory.setDPIWidth(mp4VideoDir, Mp4VideoDirectory.TAG_HORIZONTAL_RESOLUTION);
+        directory.setDPIHeight(mp4VideoDir, Mp4VideoDirectory.TAG_VERTICAL_RESOLUTION);
+        try {
+          int bitsPerPixel = mp4VideoDir.getInt(Mp4VideoDirectory.TAG_DEPTH) % 32;
+          directory.setBitPerPixel(bitsPerPixel);
+        } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        directory.setDuration(mp4VideoDir, Mp4VideoDirectory.TAG_DURATION);
+        directory.setVideoCodec(mp4VideoDir, Mp4VideoDirectory.TAG_COMPRESSION_TYPE);
+        directory.setFrameRate(mp4VideoDir, Mp4VideoDirectory.TAG_FRAME_RATE);
+        continue;
+      }
+
+      if (dir instanceof Mp4SoundDirectory) {
+        final Mp4SoundDirectory mp4SoundDir = (Mp4SoundDirectory)dir;
+        directory.setAudioCodec(mp4SoundDir, Mp4SoundDirectory.TAG_AUDIO_FORMAT);
+        directory.setAudioSampleSize(mp4SoundDir, Mp4SoundDirectory.TAG_AUDIO_SAMPLE_SIZE);
+        directory.setAudioSampleRate(mp4SoundDir, Mp4SoundDirectory.TAG_AUDIO_SAMPLE_RATE);
+        continue;
+      }
+
+      if (dir instanceof Mp4Directory) {
+        final Mp4Directory mp4Dir = (Mp4Directory)dir;
+        directory.setDuration(mp4Dir, Mp4Directory.TAG_DURATION);
+        continue;
+      }
+
+      if (dir instanceof EpsDirectory) {
+        final EpsDirectory epsDir = (EpsDirectory)dir;
+        directory.setPixelWidth(epsDir, EpsDirectory.TAG_IMAGE_WIDTH);
+        directory.setPixelHeight(epsDir, EpsDirectory.TAG_IMAGE_HEIGHT);
+        try {
+          int bitsPerPixel = 24;
+          int colorType = epsDir.getInt(EpsDirectory.TAG_COLOR_TYPE);
+          if (colorType == 1) {
+            String imageData = epsDir.getString(EpsDirectory.TAG_IMAGE_DATA);
+            if (imageData != null && imageData.split(" ")[2].equals("1")) {
+              bitsPerPixel = 1;
+              directory.setColorMode("Bitmap");
+            } else {
+              bitsPerPixel = 8;
+              directory.setColorMode("Grayscale");
+            }
+          } else if (colorType == 2) {
+            directory.setColorMode("Lab");
+          } else if (colorType == 4) {
+            bitsPerPixel = 32;
+            directory.setColorMode("CMYK");
+          }
+          directory.setBitPerPixel(bitsPerPixel);
+          skipEPSPreview = epsDir.containsTag(EpsDirectory.TAG_TIFF_PREVIEW_SIZE);
+         } catch (MetadataException e) {
+          // Nothing needs to be done
+        }
+        continue;
+      }
+    }
+
+    // Set default value if empty
+    directory.setPixelWidth(0);
+    directory.setPixelHeight(0);
+    directory.setOrientation(0);
+    directory.setDPIWidth(0.0);
+    directory.setDPIHeight(0.0);
+    directory.setColorMode("RGB");
+    directory.setBitPerPixel(0);
+    directory.setAlpha(false);
+    directory.setDuration(0);
+    directory.setVideoCodec("Unknown");
+    directory.setFrameRate(0.0);
+    directory.setAudioCodec("Unknown");
+    directory.setAudioSampleSize(0);
+    directory.setAudioSampleRate(0.0);
+
+    metadata.addDirectory(directory);
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageFormatConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageFormatConfig.java
new file mode 100644
index 0000000..84d957f
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageFormatConfig.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.image;
+
+import java.util.List;
+
+import org.apache.drill.common.logical.FormatPluginConfig;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import com.google.common.collect.ImmutableList;
+
+/**
+ * Jackson-serializable configuration for the image metadata format plugin.
+ *
+ * NOTE(review): fields appear to be public and non-final so Jackson can
+ * populate them directly during storage-plugin config deserialization --
+ * confirm before tightening visibility.
+ */
+@JsonTypeName("image") @JsonInclude(Include.NON_DEFAULT)
+public class ImageFormatConfig implements FormatPluginConfig {
+
+  // File extensions this plugin handles; empty by default.
+  public List<String> extensions = ImmutableList.of();
+  // Whether to include file system metadata (file size, modification time).
+  public boolean fileSystemMetadata = true;
+  // Whether to emit human-readable tag descriptions instead of raw values.
+  public boolean descriptive = true;
+  // Time zone for interpreting date/time tags; null means machine default.
+  public String timeZone = null;
+
+  public List<String> getExtensions() {
+    return extensions;
+  }
+
+  public boolean hasFileSystemMetadata() {
+    return fileSystemMetadata;
+  }
+
+  public boolean isDescriptive() {
+    return descriptive;
+  }
+
+  public String getTimeZone() {
+    return timeZone;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((extensions == null) ? 0 : extensions.hashCode());
+    // 1231/1237 are the primes Boolean.hashCode() uses for true/false
+    result = prime * result + (fileSystemMetadata ? 1231 : 1237);
+    result = prime * result + (descriptive ? 1231 : 1237);
+    result = prime * result + ((timeZone == null) ? 0 : timeZone.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    } else if (obj == null) {
+      return false;
+    } else if (getClass() != obj.getClass()) {
+      return false;
+    }
+    ImageFormatConfig other = (ImageFormatConfig) obj;
+    if (extensions == null) {
+      if (other.extensions != null) {
+        return false;
+      }
+    } else if (!extensions.equals(other.extensions)) {
+      return false;
+    }
+    if (fileSystemMetadata != other.fileSystemMetadata) {
+      return false;
+    }
+    if (descriptive != other.descriptive) {
+      return false;
+    }
+    if (timeZone == null) {
+      if (other.timeZone != null) {
+        return false;
+      }
+    } else if (!timeZone.equals(other.timeZone)) {
+      return false;
+    }
+    return true;
+  }
+}
\ No newline at end of file
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageFormatPlugin.java
new file mode 100644
index 0000000..6b0b9b4
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageFormatPlugin.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.image;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.RecordReader;
+import org.apache.drill.exec.store.RecordWriter;
+import org.apache.drill.exec.store.dfs.DrillFileSystem;
+import org.apache.drill.exec.store.dfs.easy.EasyFormatPlugin;
+import org.apache.drill.exec.store.dfs.easy.EasyWriter;
+import org.apache.drill.exec.store.dfs.easy.FileWork;
+import org.apache.hadoop.conf.Configuration;
+
+public class ImageFormatPlugin extends EasyFormatPlugin<ImageFormatConfig> {
+
+  private final static String DEFAULT_NAME = "image";
+
+  public ImageFormatPlugin(String name, DrillbitContext context, Configuration fsConf,
+                           StoragePluginConfig storageConfig) {
+    super(name, context, fsConf, storageConfig, new ImageFormatConfig(), true, false, false, false,
+        Collections.<String>emptyList(), DEFAULT_NAME);
+  }
+
+  public ImageFormatPlugin(String name, DrillbitContext context, Configuration fsConf,
+                           StoragePluginConfig storageConfig, ImageFormatConfig formatConfig) {
+    super(name, context, fsConf, storageConfig, formatConfig, true, false, false, false,
+        formatConfig.getExtensions(), DEFAULT_NAME);
+  }
+
+  @Override
+  public RecordReader getRecordReader(FragmentContext context, DrillFileSystem dfs, FileWork fileWork,
+      List<SchemaPath> columns, String userName) throws ExecutionSetupException {
+    return new ImageRecordReader(context, dfs, fileWork.getPath(),
+        ((ImageFormatConfig)formatConfig).hasFileSystemMetadata(),
+        ((ImageFormatConfig)formatConfig).isDescriptive(),
+        ((ImageFormatConfig)formatConfig).getTimeZone());
+  }
+
+  @Override
+  public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
+    throw new UnsupportedOperationException("Drill doesn't currently support writing to image files.");
+  }
+
+  @Override
+  public int getReaderOperatorType() {
+    return 4002;
+  }
+
+  @Override
+  public int getWriterOperatorType() {
+    throw new UnsupportedOperationException("Drill doesn't currently support writing to image files.");
+  }
+
+  @Override
+  public boolean supportsPushDown() {
+    return true;
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageRecordReader.java
new file mode 100644
index 0000000..91f8b99
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/image/ImageRecordReader.java
@@ -0,0 +1,493 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.image;
+
+import io.netty.buffer.DrillBuf;
+
+import java.io.BufferedInputStream;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.TimeZone;
+
+import com.adobe.xmp.XMPException;
+import com.adobe.xmp.XMPMeta;
+import com.adobe.xmp.options.IteratorOptions;
+import com.adobe.xmp.properties.XMPPropertyInfo;
+
+import com.drew.imaging.FileType;
+import com.drew.imaging.FileTypeDetector;
+import com.drew.imaging.ImageMetadataReader;
+import com.drew.imaging.ImageProcessingException;
+import com.drew.lang.Charsets;
+import com.drew.lang.KeyValuePair;
+import com.drew.lang.Rational;
+import com.drew.metadata.Directory;
+import com.drew.metadata.Metadata;
+import com.drew.metadata.StringValue;
+import com.drew.metadata.Tag;
+import com.drew.metadata.eps.EpsDirectory;
+import com.drew.metadata.exif.ExifIFD0Directory;
+import com.drew.metadata.exif.ExifInteropDirectory;
+import com.drew.metadata.exif.ExifSubIFDDirectory;
+import com.drew.metadata.exif.GpsDirectory;
+import com.drew.metadata.exif.PanasonicRawIFD0Directory;
+import com.drew.metadata.exif.makernotes.FujifilmMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.NikonType2MakernoteDirectory;
+import com.drew.metadata.exif.makernotes.OlympusCameraSettingsMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.OlympusEquipmentMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.OlympusFocusInfoMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.OlympusImageProcessingMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.OlympusMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.OlympusRawDevelopment2MakernoteDirectory;
+import com.drew.metadata.exif.makernotes.OlympusRawDevelopmentMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.OlympusRawInfoMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.PanasonicMakernoteDirectory;
+import com.drew.metadata.exif.makernotes.SamsungType2MakernoteDirectory;
+import com.drew.metadata.exif.makernotes.SonyType6MakernoteDirectory;
+import com.drew.metadata.icc.IccDirectory;
+import com.drew.metadata.jpeg.JpegComponent;
+import com.drew.metadata.photoshop.PhotoshopDirectory;
+import com.drew.metadata.png.PngDirectory;
+import com.drew.metadata.xmp.XmpDirectory;
+
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.ops.OperatorContext;
+import org.apache.drill.exec.physical.impl.OutputMutator;
+import org.apache.drill.exec.store.AbstractRecordReader;
+import org.apache.drill.exec.store.dfs.DrillFileSystem;
+import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+import org.apache.drill.exec.vector.complex.writer.FieldWriter;
+import org.apache.drill.exec.vector.complex.writer.VarCharWriter;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+
/**
 * Record reader that extracts image/video metadata (EXIF, XMP, ICC, maker
 * notes, ...) via the metadata-extractor library and emits it as a single
 * record of nested maps and lists, one record per file.
 */
public class ImageRecordReader extends AbstractRecordReader {

  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ImageRecordReader.class);

  private final DrillFileSystem fs;
  private final Path hadoopPath;            // fully qualified path of the image file
  private final boolean fileSystemMetadata; // emit file size / modification-time tags?
  private final boolean descriptive;        // emit string descriptions instead of raw values?
  private final TimeZone timeZone;          // zone used to decode date tags

  private VectorContainerWriter writer;
  private FileStatus fileStatus;
  private BufferedInputStream metadataStream;
  private DrillBuf managedBuffer;           // scratch buffer for VarChar/VarBinary writes
  private boolean finish;                   // true once the single record has been produced
+
  /**
   * @param inputPath          path of the image file to read
   * @param fileSystemMetadata whether to emit file size / date tags
   * @param descriptive        whether to emit descriptions instead of raw values
   * @param timeZone           time zone id for date tags, or null for the JVM default
   */
  public ImageRecordReader(FragmentContext context, DrillFileSystem fs, String inputPath,
                           boolean fileSystemMetadata, boolean descriptive, String timeZone) {
    this.fs = fs;
    hadoopPath = fs.makeQualified(new Path(inputPath));
    this.fileSystemMetadata = fileSystemMetadata;
    this.descriptive = descriptive;
    this.timeZone = (timeZone != null) ? TimeZone.getTimeZone(timeZone) : TimeZone.getDefault();
    managedBuffer = context.getManagedBuffer();
  }
+
  @Override
  public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException {

    try {
      fileStatus = fs.getFileStatus(hadoopPath);
      // Buffered stream: the file-type detector in next() peeks at leading
      // bytes before the metadata reader consumes the same stream
      // (presumably relies on mark/reset support — confirm with detector docs).
      metadataStream = new BufferedInputStream(fs.open(hadoopPath));
      writer = new VectorContainerWriter(output);
      finish = false;
    } catch (Exception e) {
      throw handleAndRaise("Failure in creating record reader", e);
    }
  }
+
+  private DrillBuf drillBuffer(byte[] b) {
+    if (managedBuffer.capacity() < b.length) {
+      managedBuffer = managedBuffer.reallocIfNeeded(b.length);
+    }
+    managedBuffer.clear();
+    managedBuffer.writeBytes(b);
+    return managedBuffer;
+  }
+
  /**
   * Wraps {@code e} in a {@link UserException} with the file path attached and
   * throws it. The declared {@code RuntimeException} return type is never
   * actually produced — it exists so callers can write
   * {@code throw handleAndRaise(...)} and the compiler treats the statement as
   * terminating.
   */
  protected RuntimeException handleAndRaise(String s, Exception e) {
    throw UserException.dataReadError(e)
        .message(s + "\n%s", e.getMessage())
        .addContext("Path", hadoopPath.toUri().getPath())
        .build(logger);
  }
+
  /**
   * Emits at most one record per file: all metadata directories flattened into
   * one row. Returns 1 on the first call and 0 afterwards; a file whose
   * metadata cannot be parsed yields an empty result rather than a failure.
   */
  @Override
  public int next() {

    if (finish) {
      return 0;
    }

    try {
      writer.allocate();
      writer.reset();

      final MapWriter rootWriter = writer.rootAsMap();
      // Detect the container type first, then parse all directories from the
      // same (buffered) stream.
      final FileType fileType = FileTypeDetector.detectFileType(metadataStream);
      final Metadata metadata = ImageMetadataReader.readMetadata(metadataStream);

      try {
        // Synthesized directory with format-independent fields (dimensions,
        // codec, duration, ...) written at the top level of the record.
        new GenericMetadataReader().read(fileType, fileStatus, metadata);
        processGenericMetadataDirectory(rootWriter,
            metadata.getFirstDirectoryOfType(GenericMetadataDirectory.class));
      } catch (Exception e) {
        // simply skip this directory
      }

      boolean skipEPSPreview = false;

      for (Directory directory : metadata.getDirectories()) {
        try {
          if (directory instanceof GenericMetadataDirectory) {
            // Already written at the top level above.
            continue;
          }
          if (directory instanceof ExifIFD0Directory && skipEPSPreview) {
            skipEPSPreview = false;
            continue;
          }
          if (directory instanceof EpsDirectory) {
            // If an EPS file contains a TIFF preview, skip the next IFD0
            skipEPSPreview = directory.containsTag(EpsDirectory.TAG_TIFF_PREVIEW_SIZE);
          }
          final MapWriter directoryWriter = rootWriter.map(formatName(directory.getName()));
          processDirectory(directoryWriter, directory, metadata);
          if (directory instanceof XmpDirectory) {
            // XMP gets an extra pass to flatten its property tree.
            processXmpDirectory(directoryWriter, (XmpDirectory) directory);
          }
        } catch (Exception e) {
          // simply skip this directory
        }
      }

      writer.setValueCount(1);
      finish = true;
      return 1;
    } catch (ImageProcessingException e) {
      // Unreadable image: produce no rows but do not fail the query.
      finish = true;
      return 0;
    } catch (Exception e) {
      throw handleAndRaise("Failure while reading image metadata record.", e);
    }
  }
+
+  private void processGenericMetadataDirectory(final MapWriter writer,
+                                               final GenericMetadataDirectory directory) {
+    for (Tag tag : directory.getTags()) {
+      try {
+        final int tagType = tag.getTagType();
+        if (tagType != GenericMetadataDirectory.TAG_FILE_SIZE &&
+            tagType != GenericMetadataDirectory.TAG_FILE_DATE_TIME || fileSystemMetadata) {
+          writeValue(writer, formatName(tag.getTagName()),
+              descriptive ? directory.getDescription(tagType) : directory.getObject(tagType));
+        }
+      } catch (Exception e) {
+        // simply skip this tag
+      }
+    }
+  }
+
  /**
   * Writes every tag of one metadata directory into its map. In descriptive
   * mode (or for tags only meaningful as text) the string description is used;
   * otherwise the raw value is used, with special decoding for EXIF/GPS dates,
   * version byte arrays and known date tags.
   */
  private void processDirectory(final MapWriter writer, final Directory directory, final Metadata metadata) {
    for (Tag tag : directory.getTags()) {
      try {
        final int tagType = tag.getTagType();
        Object value;
        if (descriptive || isDescriptionTag(directory, tagType)) {
          value = directory.getDescription(tagType);
          if (directory instanceof PngDirectory) {
            // Repeatable PNG chunks are modelled as lists even for a single
            // value so the column type stays stable across files.
            if (((PngDirectory) directory).getPngChunkType().areMultipleAllowed()) {
              value = new String[] { (String) value };
            }
          }
        } else {
          value = directory.getObject(tagType);
          if (directory instanceof ExifIFD0Directory && tagType == ExifIFD0Directory.TAG_DATETIME) {
            // Combine IFD0's DateTime with the sub-IFD's subsecond field, if any.
            ExifSubIFDDirectory exifSubIFDDir = metadata.getFirstDirectoryOfType(ExifSubIFDDirectory.class);
            String subsecond = null;
            if (exifSubIFDDir != null) {
              subsecond = exifSubIFDDir.getString(ExifSubIFDDirectory.TAG_SUBSECOND_TIME);
            }
            value = directory.getDate(tagType, subsecond, timeZone);
          } else if (directory instanceof ExifSubIFDDirectory) {
            if (tagType == ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL) {
              value = ((ExifSubIFDDirectory) directory).getDateOriginal(timeZone);
            } else if (tagType == ExifSubIFDDirectory.TAG_DATETIME_DIGITIZED) {
              value = ((ExifSubIFDDirectory) directory).getDateDigitized(timeZone);
            }
          } else if (directory instanceof GpsDirectory) {
            // Decimal degrees instead of the raw degree/minute/second rationals.
            if (tagType == GpsDirectory.TAG_LATITUDE) {
              value = ((GpsDirectory) directory).getGeoLocation().getLatitude();
            } else if (tagType == GpsDirectory.TAG_LONGITUDE) {
              value = ((GpsDirectory) directory).getGeoLocation().getLongitude();
            }
          }
          if (isVersionTag(directory, tagType)) {
            // Version tags store ASCII digits; decode as text, not bytes.
            value = directory.getString(tagType, "US-ASCII");
          } else if (isDateTag(directory, tagType)) {
            value = directory.getDate(tagType, timeZone);
          }
        }
        writeValue(writer, formatName(tag.getTagName()), value);
      } catch (Exception e) {
        // simply skip this tag
      }
    }
  }
+
+  private void processXmpDirectory(final MapWriter writer, final XmpDirectory directory) {
+    HashSet<String> listItems = new HashSet();
+    XMPMeta xmpMeta = directory.getXMPMeta();
+    if (xmpMeta != null) {
+      try {
+        IteratorOptions iteratorOptions = new IteratorOptions().setJustLeafnodes(true);
+        for (final Iterator i = xmpMeta.iterator(iteratorOptions); i.hasNext(); ) {
+          try {
+            XMPPropertyInfo prop = (XMPPropertyInfo) i.next();
+            String path = prop.getPath();
+            String value = prop.getValue();
+            if (path != null && value != null) {
+              // handling lang-alt array items
+              if (prop.getOptions().getHasLanguage()) {
+                XMPPropertyInfo langProp = (XMPPropertyInfo) i.next();
+                if (langProp.getPath().endsWith("/xml:lang")) {
+                  String lang = langProp.getValue();
+                  path = path.replaceFirst("\\[\\d+\\]$", "") +
+                      (lang.equals("x-default") ? "" : "_" + lang);
+                }
+              }
+
+              FieldWriter writerSub = (FieldWriter) writer;
+              String[] elements = path.replaceAll("/\\w+:", "/").split(":|/|(?=\\[)");
+              for (int j = 1; j < elements.length; j++) {
+                String parent = elements[j - 1];
+                boolean isList = elements[j].startsWith("[");
+                if (parent.startsWith("[")) {
+                  writerSub = (FieldWriter) (isList ? writerSub.list() : writerSub.map());
+                  if (listItems.add(path.replaceFirst("[^\\]]+$", ""))) {
+                    writerSub.start();
+                  }
+                } else {
+                  writerSub = (FieldWriter)
+                      (isList ? writerSub.list(formatName(parent)) : writerSub.map(formatName(parent)));
+                }
+              }
+              String parent = elements[elements.length - 1];
+              VarCharWriter varCharWriter = parent.startsWith("[") ?
+                  writerSub.varChar() : writerSub.varChar(formatName(parent));
+              writeString(varCharWriter, value);
+            }
+          } catch (Exception e) {
+            // simply skip this property
+          }
+        }
+      } catch (XMPException ignored) {
+      }
+    }
+  }
+
+  private void writeValue(final MapWriter writer, final String tagName, final Object value) {
+    if (value == null) {
+      return;
+    }
+
+    if (value instanceof Boolean) {
+      writer.bit(tagName).writeBit((Boolean) value ? 1 : 0);
+    } else if (value instanceof Byte) {
+      // TINYINT is not supported
+      writer.integer(tagName).writeInt(((Byte) value).intValue());
+    } else if (value instanceof Short) {
+      // SMALLINT is not supported
+      writer.integer(tagName).writeInt(((Short) value).intValue());
+    } else if (value instanceof Integer) {
+      writer.integer(tagName).writeInt((Integer) value);
+    } else if (value instanceof Long) {
+      writer.bigInt(tagName).writeBigInt((Long) value);
+    } else if (value instanceof Float) {
+      writer.float4(tagName).writeFloat4((Float) value);
+    } else if (value instanceof Double) {
+      writer.float8(tagName).writeFloat8((Double) value);
+    } else if (value instanceof Rational) {
+      writer.float8(tagName).writeFloat8(((Rational) value).doubleValue());
+    } else if (value instanceof String) {
+      writeString(writer.varChar(tagName), (String) value);
+    } else if (value instanceof StringValue) {
+      writeString(writer.varChar(tagName), ((StringValue) value).toString());
+    } else if (value instanceof Date) {
+      writer.timeStamp(tagName).writeTimeStamp(((Date) value).getTime());
+    } else if (value instanceof boolean[]) {
+      for (boolean v : (boolean[]) value) {
+        writer.list(tagName).bit().writeBit(v ? 1 : 0);
+      }
+    } else if (value instanceof byte[]) {
+      final byte[] bytes = (byte[]) value;
+      if (bytes.length == 1) {
+        writer.integer(tagName).writeInt(bytes[0]);
+      } else if (bytes.length <= 4) {
+        ListWriter listWriter = writer.list(tagName);
+        for (byte v : bytes) {
+          listWriter.integer().writeInt(v);
+        }
+      } else {
+        writer.varBinary(tagName).writeVarBinary(0, bytes.length, drillBuffer(bytes));
+      }
+    } else if (value instanceof short[]) {
+      ListWriter listWriter = writer.list(tagName);
+      for (short v : (short[]) value) {
+        // SMALLINT is not supported
+        listWriter.integer().writeInt(v);
+      }
+    } else if (value instanceof int[]) {
+      ListWriter listWriter = writer.list(tagName);
+      for (int v : (int[]) value) {
+        listWriter.integer().writeInt(v);
+      }
+    } else if (value instanceof long[]) {
+      ListWriter listWriter = writer.list(tagName);
+      for (long v : (long[]) value) {
+        listWriter.bigInt().writeBigInt(v);
+      }
+    } else if (value instanceof float[]) {
+      ListWriter listWriter = writer.list(tagName);
+      for (float v : (float[]) value) {
+        listWriter.float4().writeFloat4(v);
+      }
+    } else if (value instanceof double[]) {
+      ListWriter listWriter = writer.list(tagName);
+      for (double v : (double[]) value) {
+        listWriter.float8().writeFloat8(v);
+      }
+    } else if (value instanceof Rational[]) {
+      ListWriter listWriter = writer.list(tagName);
+      for (Rational v : (Rational[]) value) {
+        listWriter.float8().writeFloat8(v.doubleValue());
+      }
+    } else if (value instanceof String[]) {
+      ListWriter listWriter = writer.list(tagName);
+      for (String v : (String[]) value) {
+        writeString(listWriter.varChar(), v);
+      }
+    } else if (value instanceof StringValue[]) {
+      ListWriter listWriter = writer.list(tagName);
+      for (StringValue v : (StringValue[]) value) {
+        writeString(listWriter.varChar(), v.toString());
+      }
+    } else if (value instanceof JpegComponent) {
+      final JpegComponent v = (JpegComponent) value;
+      writer.map(tagName).integer("ComponentId").writeInt(v.getComponentId());
+      writer.map(tagName).integer("HorizontalSamplingFactor").writeInt(v.getHorizontalSamplingFactor());
+      writer.map(tagName).integer("VerticalSamplingFactor").writeInt(v.getVerticalSamplingFactor());
+      writer.map(tagName).integer("QuantizationTableNumber").writeInt(v.getQuantizationTableNumber());
+    } else if (value instanceof List<?>) {
+      ListWriter listWriter = writer.list(tagName);
+      for (Object v : (List<?>) value) {
+        if (v instanceof KeyValuePair) {
+          listWriter.map().start();
+          writeString(listWriter.map().varChar("Key"), ((KeyValuePair) v).getKey());
+          writeString(listWriter.map().varChar("Value"), ((KeyValuePair) v).getValue().toString());
+          listWriter.map().end();
+        } else {
+          writeString(listWriter.varChar(), v.toString());
+        }
+      }
+    } else {
+      writeString(writer.varChar(tagName), value.toString());
+    }
+  }
+
+  private void writeString(final VarCharWriter writer, final String value) {
+    final byte[] stringBytes = value.getBytes(Charsets.UTF_8);
+    writer.writeVarChar(0, stringBytes.length, drillBuffer(stringBytes));
+  }
+
+  private String formatName(final String tagName) {
+    StringBuilder builder = new StringBuilder();
+    boolean upperCase = true;
+    for (char c : tagName.toCharArray()) {
+      if (c == ' ' || c == '-' || c == '/') {
+        upperCase = true;
+      } else {
+        builder.append(upperCase ? Character.toUpperCase(c) : c);
+        upperCase = false;
+      }
+    }
+    return builder.toString();
+  }
+
+  private boolean isDescriptionTag(final Directory directory, final int tagType) {
+    return directory instanceof IccDirectory && tagType > 0x20202020 && tagType < 0x7a7a7a7a ||
+        directory instanceof PhotoshopDirectory;
+  }
+
  /**
   * True when (directory type, tag id) identifies a "version" tag whose raw
   * bytes must be read as an ASCII string rather than decoded numerically.
   * Each clause pairs one directory class with its version tag id(s); since
   * {@code &&} binds tighter than {@code ||}, the expression is an OR over
   * (instanceof && tag-id) pairs.
   */
  private boolean isVersionTag(final Directory directory, final int tagType) {
    return directory instanceof ExifSubIFDDirectory &&
        (tagType == ExifSubIFDDirectory.TAG_EXIF_VERSION || tagType == ExifSubIFDDirectory.TAG_FLASHPIX_VERSION) ||
        directory instanceof ExifInteropDirectory &&
        tagType == ExifInteropDirectory.TAG_INTEROP_VERSION ||
        directory instanceof FujifilmMakernoteDirectory &&
        tagType == FujifilmMakernoteDirectory.TAG_MAKERNOTE_VERSION ||
        directory instanceof NikonType2MakernoteDirectory &&
        tagType == NikonType2MakernoteDirectory.TAG_FIRMWARE_VERSION ||
        directory instanceof OlympusCameraSettingsMakernoteDirectory &&
        tagType == OlympusCameraSettingsMakernoteDirectory.TagCameraSettingsVersion ||
        directory instanceof OlympusEquipmentMakernoteDirectory &&
        tagType == OlympusEquipmentMakernoteDirectory.TAG_EQUIPMENT_VERSION ||
        directory instanceof OlympusFocusInfoMakernoteDirectory &&
        tagType == OlympusFocusInfoMakernoteDirectory.TagFocusInfoVersion ||
        directory instanceof OlympusImageProcessingMakernoteDirectory &&
        tagType == OlympusImageProcessingMakernoteDirectory.TagImageProcessingVersion ||
        directory instanceof OlympusMakernoteDirectory &&
        tagType == OlympusMakernoteDirectory.TAG_MAKERNOTE_VERSION ||
        directory instanceof OlympusRawDevelopment2MakernoteDirectory &&
        tagType == OlympusRawDevelopment2MakernoteDirectory.TagRawDevVersion ||
        directory instanceof OlympusRawDevelopmentMakernoteDirectory &&
        tagType == OlympusRawDevelopmentMakernoteDirectory.TagRawDevVersion ||
        directory instanceof OlympusRawInfoMakernoteDirectory &&
        tagType == OlympusRawInfoMakernoteDirectory.TagRawInfoVersion ||
        directory instanceof PanasonicMakernoteDirectory &&
        (tagType == PanasonicMakernoteDirectory.TAG_FIRMWARE_VERSION || tagType == PanasonicMakernoteDirectory.TAG_MAKERNOTE_VERSION || tagType == PanasonicMakernoteDirectory.TAG_EXIF_VERSION) ||
        directory instanceof SamsungType2MakernoteDirectory &&
        tagType == SamsungType2MakernoteDirectory.TagMakerNoteVersion ||
        directory instanceof SonyType6MakernoteDirectory &&
        tagType == SonyType6MakernoteDirectory.TAG_MAKERNOTE_THUMB_VERSION ||
        directory instanceof PanasonicRawIFD0Directory &&
        tagType == PanasonicRawIFD0Directory.TagPanasonicRawVersion;
  }
+
+  private boolean isDateTag(final Directory directory, final int tagType) {
+    return directory instanceof IccDirectory && tagType == IccDirectory.TAG_PROFILE_DATETIME ||
+        directory instanceof PngDirectory && tagType == PngDirectory.TAG_LAST_MODIFICATION_TIME;
+  }
+
+  @Override
+  public void close() throws Exception {
+    if (metadataStream != null) {
+      metadataStream.close();
+    }
+  }
+}
diff --git a/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json b/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
index 0b6add0..417635a 100644
--- a/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
+++ b/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
@@ -58,6 +58,15 @@
           extensions: [ "csvh" ],
           delimiter: ",",
           extractHeader: true
+        },
+        "image" : {
+          type: "image",
+          extensions: [
+            "jpg", "jpeg", "jpe", "tif", "tiff", "dng", "psd", "png", "bmp", "gif",
+            "ico", "pcx", "wav", "wave", "avi", "webp", "mov", "mp4", "m4a", "m4p",
+            "m4b", "m4r", "m4v", "3gp", "3g2", "eps", "epsf", "epsi", "ai", "arw",
+            "crw", "cr2", "nef", "orf", "raf", "rw2", "rwl", "srw", "x3f"
+          ]
         }
       }
     },
@@ -147,6 +156,15 @@
           extensions: [ "csvh" ],
           delimiter: ",",
           extractHeader: true
+        },
+        "image" : {
+          type: "image",
+          extensions: [
+            "jpg", "jpeg", "jpe", "tif", "tiff", "dng", "psd", "png", "bmp", "gif",
+            "ico", "pcx", "wav", "wave", "avi", "webp", "mov", "mp4", "m4a", "m4p",
+            "m4b", "m4r", "m4v", "3gp", "3g2", "eps", "epsf", "epsi", "ai", "arw",
+            "crw", "cr2", "nef", "orf", "raf", "rw2", "rwl", "srw", "x3f"
+          ]
         }
       }
     }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
index 3ac675b..8b73b53 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFormatPluginOptionExtractor.java
@@ -26,6 +26,7 @@ import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.scanner.RunTimeScan;
 import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.exec.store.easy.text.TextFormatPlugin.TextFormatConfig;
+import org.apache.drill.exec.store.image.ImageFormatConfig;
 import org.junit.Test;
 
 import com.fasterxml.jackson.annotation.JsonTypeName;
@@ -65,6 +66,12 @@ public class TestFormatPluginOptionExtractor {
         case "httpd":
           assertEquals("(type: String, logFormat: String, timestampFormat: String)", d.presentParams());
           break;
+        case "image":
+          assertEquals(ImageFormatConfig.class, d.pluginConfigClass);
+          assertEquals(
+              "(type: String, fileSystemMetadata: boolean, descriptive: boolean, timeZone: String)", d.presentParams()
+          );
+          break;
         default:
           fail("add validation for format plugin type " + d.typeName);
       }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
new file mode 100644
index 0000000..e5d513b
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/image/TestImageRecordReader.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.exec.store.image;
+
+import java.util.TimeZone;
+
+import org.apache.drill.test.BaseTestQuery;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
/**
 * End-to-end tests for the image format plugin: each test reads one sample
 * file with {@code fileSystemMetadata} disabled (file size/date vary per
 * checkout) and compares the produced record against a stored JSON baseline.
 */
public class TestImageRecordReader extends BaseTestQuery {

  private static TimeZone defaultTimeZone;

  @BeforeClass
  public static void setUp() {
    // Baselines contain absolute timestamps; pin the JVM zone to UTC so date
    // tags decode identically on every machine. Restored in cleanUp().
    defaultTimeZone = TimeZone.getDefault();
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
  }

  /**
   * CTAS the image metadata into a JSON table named {@code tableName}, then
   * verify the table's contents against store/image/{tableName}.json.
   */
  private void createAndQuery(String tableName, String imageFile) throws Exception {
    final String query = String.format(
      "select * from table(cp.`store/image/%s`(type => 'image', fileSystemMetadata => false))",
      imageFile);

    runSQL("alter session set `store.format`='json'");
    test("create table dfs.tmp.`%s` as %s", tableName, query);

    testBuilder()
      .sqlQuery("select * from dfs.tmp.`%s`", tableName)
      .ordered()
      .jsonBaselineFile("store/image/" + tableName + ".json")
      .go();
    runSQL("alter session set `store.format` = 'parquet'");
  }

  @Test
  public void testBmpImage() throws Exception {
    createAndQuery("bmp", "rose-128x174-24bit.bmp");
  }

  @Test
  public void testGifImage() throws Exception {
    createAndQuery("gif", "rose-128x174-8bit-alpha.gif");
  }

  @Test
  public void testIcoImage() throws Exception {
    createAndQuery("ico", "rose-32x32-32bit-alpha.ico");
  }

  @Test
  public void testJpegImage() throws Exception {
    createAndQuery("jpeg", "withExifAndIptc.jpg");
  }

  @Test
  public void testPcxImage() throws Exception {
    createAndQuery("pcx", "rose-128x174-24bit.pcx");
  }

  @Test
  public void testPngImage() throws Exception {
    createAndQuery("png", "rose-128x174-32bit-alpha.png");
  }

  @Test
  public void testPsdImage() throws Exception {
    createAndQuery("psd", "rose-128x174-32bit-alpha.psd");
  }

  @Test
  public void testTiffImage() throws Exception {
    createAndQuery("tiff", "rose-128x174-24bit-lzw.tiff");
  }

  @Test
  public void testWavImage() throws Exception {
    createAndQuery("wav", "sample.wav");
  }

  @Test
  public void testAviImage() throws Exception {
    createAndQuery("avi", "sample.avi");
  }

  @Test
  public void testWebpImage() throws Exception {
    createAndQuery("webp", "1_webp_a.webp");
  }

  @Test
  public void testMovImage() throws Exception {
    createAndQuery("mov", "sample.mov");
  }

  @Test
  public void testMp4Image() throws Exception {
    createAndQuery("mp4", "sample.mp4");
  }

  @Test
  public void testEpsImage() throws Exception {
    createAndQuery("eps", "adobeJpeg1.eps");
  }

  @AfterClass
  public static void cleanUp() {
    // Restore the zone captured in setUp() so later test classes are unaffected.
    TimeZone.setDefault(defaultTimeZone);
  }
}
diff --git a/exec/java-exec/src/test/resources/store/image/1_webp_a.webp b/exec/java-exec/src/test/resources/store/image/1_webp_a.webp
new file mode 100644
index 0000000..f7dc208
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/1_webp_a.webp differ
diff --git a/exec/java-exec/src/test/resources/store/image/adobeJpeg1.eps b/exec/java-exec/src/test/resources/store/image/adobeJpeg1.eps
new file mode 100644
index 0000000..b3941d6
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/adobeJpeg1.eps differ
diff --git a/exec/java-exec/src/test/resources/store/image/avi.json b/exec/java-exec/src/test/resources/store/image/avi.json
new file mode 100644
index 0000000..f97d2db
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/avi.json
@@ -0,0 +1,32 @@
+{
+  "Format" : "AVI",
+  "Orientaion" : "Unknown (0)",
+  "DPIWidth" : "0",
+  "DPIHeight" : "0",
+  "PixelWidth" : "320",
+  "PixelHeight" : "240",
+  "BitsPerPixel" : "0",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:06",
+  "VideoCodec" : "XVID",
+  "FrameRate" : "25",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "38.281",
+  "FileType" : {
+    "DetectedFileTypeName" : "AVI",
+    "DetectedFileTypeLongName" : "Audio Video Interleaved",
+    "DetectedMIMEType" : "video/vnd.avi",
+    "ExpectedFileNameExtension" : "avi"
+  },
+  "AVI" : {
+    "Width" : "320 pixels",
+    "Height" : "240 pixels",
+    "StreamCount" : "2",
+    "FramesPerSecond" : "25",
+    "Duration" : "00:00:06",
+    "VideoCodec" : "XVID",
+    "SamplesPerSecond" : "38.281"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/bmp.json b/exec/java-exec/src/test/resources/store/image/bmp.json
new file mode 100644
index 0000000..da9d2e2
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/bmp.json
@@ -0,0 +1,36 @@
+{
+  "Format" : "BMP",
+  "PixelWidth" : "128",
+  "PixelHeight" : "174",
+  "DPIWidth" : "71.984",
+  "DPIHeight" : "71.984",
+  "BitsPerPixel" : "24",
+  "Orientaion" : "Unknown (0)",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "BMP",
+    "DetectedFileTypeLongName" : "Device Independent Bitmap",
+    "DetectedMIMEType" : "image/bmp",
+    "ExpectedFileNameExtension" : "bmp"
+  },
+  "BMPHeader" : {
+    "BitmapType" : "Standard",
+    "HeaderSize" : "40",
+    "ImageWidth" : "128",
+    "ImageHeight" : "174",
+    "Planes" : "1",
+    "BitsPerPixel" : "24",
+    "Compression" : "None",
+    "XPixelsPerMeter" : "2834",
+    "YPixelsPerMeter" : "2834",
+    "PaletteColourCount" : "0",
+    "ImportantColourCount" : "0"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/eps.json b/exec/java-exec/src/test/resources/store/image/eps.json
new file mode 100644
index 0000000..08d2268
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/eps.json
@@ -0,0 +1,116 @@
+{
+  "Format" : "EPS",
+  "Orientaion" : "Top, left side (Horizontal / normal)",
+  "DPIWidth" : "101",
+  "DPIHeight" : "101",
+  "PixelWidth" : "275",
+  "PixelHeight" : "207",
+  "BitsPerPixel" : "24",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "EPS",
+    "DetectedFileTypeLongName" : "Encapsulated PostScript",
+    "DetectedMIMEType" : "application/postscript",
+    "ExpectedFileNameExtension" : "eps"
+  },
+  "ExifIFD0" : {
+    "ImageWidth" : "275 pixels",
+    "ImageHeight" : "207 pixels",
+    "BitsPerSample" : "8 8 8 bits/component/pixel",
+    "PhotometricInterpretation" : "RGB",
+    "Orientation" : "Top, left side (Horizontal / normal)",
+    "SamplesPerPixel" : "3 samples/pixel",
+    "XResolution" : "101 dots per inch",
+    "YResolution" : "101 dots per inch",
+    "ResolutionUnit" : "Inch",
+    "Software" : "Adobe Photoshop CC 2017 (Macintosh)",
+    "DateTime" : "2017:08:16 12:24:54",
+    "Copyright" : "1999 Lars Borg"
+  },
+  "ExifSubIFD" : {
+    "ExifVersion" : "2.21",
+    "ColorSpace" : "Undefined",
+    "ExifImageWidth" : "275 pixels",
+    "ExifImageHeight" : "207 pixels"
+  },
+  "ExifThumbnail" : {
+    "Compression" : "JPEG (old-style)",
+    "XResolution" : "72 dots per inch",
+    "YResolution" : "72 dots per inch",
+    "ResolutionUnit" : "Inch",
+    "ThumbnailOffset" : "414 bytes",
+    "ThumbnailLength" : "0 bytes"
+  },
+  "ICCProfile" : {
+    "ProfileSize" : "532",
+    "CMMType" : "ADBE",
+    "Version" : "2.1.0",
+    "Class" : "Display Device",
+    "ColorSpace" : "RGB ",
+    "ProfileConnectionSpace" : "XYZ ",
+    "ProfileDateTime" : "1999:04:05 15:08:05",
+    "Signature" : "acsp",
+    "PrimaryPlatform" : "Apple Computer, Inc.",
+    "DeviceManufacturer" : "none",
+    "RenderingIntent" : "Media-Relative Colorimetric",
+    "XYZValues" : "0.964 1 0.825",
+    "TagCount" : "10",
+    "Copyright" : "(c) 1999 Adobe Systems Inc.",
+    "ProfileDescription" : "GBR",
+    "MediaWhitePoint" : "(0.9505, 1, 1.0891)",
+    "MediaBlackPoint" : "(0, 0, 0)",
+    "RedTRC" : "0.0085908",
+    "GreenTRC" : "0.0085908",
+    "BlueTRC" : "0.0085908",
+    "RedColorant" : "(0.3851, 0.7169, 0.0971)",
+    "GreenColorant" : "(0.1431, 0.0606, 0.7139)",
+    "BlueColorant" : "(0.436, 0.2225, 0.0139)"
+  },
+  "IPTC": {
+    "CodedCharacterSet" : "UTF-8",
+    "ApplicationRecordVersion" : "2",
+    "CopyrightNotice" : "1999 Lars Borg"
+  },
+  "Photoshop" : {
+    "CaptionDigest" : "218 119 165 163 16 30 63 186 160 177 8 58 1 54 252 149",
+    "PrintInfo2" : "[229 values]",
+    "PrintStyle" : "[557 values]",
+    "ResolutionInfo" : "101x101 DPI",
+    "PrintScale" : "Centered, Scale 1.0",
+    "GlobalAngle" : "30",
+    "GlobalAltitude" : "30",
+    "PrintFlags" : "0 0 0 0 0 0 0 0 1",
+    "CopyrightFlag" : "Yes",
+    "PrintFlagsInformation" : "0 1 0 0 0 0 0 0 0 1",
+    "ColorHalftoningInformation" : "[72 values]",
+    "ColorTransferFunctions" : "[112 values]",
+    "GridAndGuidesInformation" : "0 0 0 1 0 0 2 64 0 0 2 64 0 0 0 0",
+    "URLList" : "0",
+    "Slices" : " (0,0,207,275) 1 Slices",
+    "PixelAspectRatio" : "1.0",
+    "SeedNumber" : "1",
+    "ThumbnailData" : "JpegRGB, 159x120, Decomp 57600 bytes, 1572865 bpp, 8151 bytes",
+    "VersionInfo" : "1 (Adobe Photoshop, Adobe Photoshop CC 2017) 1",
+    "EPSOptions" : "1 1 0 0 0 0 0 0"
+  },
+  "EPS" : {
+    "TIFFPreviewSize" : "41802 bytes",
+    "TIFFPreviewOffset" : "30 bytes",
+    "Creator" : "Adobe Photoshop Version 2017.1.1 20170425.r.252 2017/04/25:23:00:00 CL 1113967",
+    "Title" : "adobeJpeg1.eps",
+    "CreationDate" : "8/16/17 12:24 PM",
+    "BoundingBox" : "0 0 196 148",
+    "ImageData" : "275 207 8 3 0 1 3 \"beginimage\"",
+    "ImageWidth" : "275 pixels",
+    "ImageHeight" : "207 pixels",
+    "ColorType" : "RGB",
+    "RamSize" : "170775"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/gif.json b/exec/java-exec/src/test/resources/store/image/gif.json
new file mode 100644
index 0000000..a05a78e
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/gif.json
@@ -0,0 +1,47 @@
+{
+  "Format" : "GIF",
+  "PixelWidth" : "128",
+  "PixelHeight" : "174",
+  "ColorMode" : "Indexed",
+  "BitsPerPixel" : "8",
+  "Orientaion" : "Unknown (0)",
+  "DPIWidth" : "0",
+  "DPIHeight" : "0",
+  "HasAlpha" : "true",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {"DetectedFileTypeName" : "GIF",
+    "DetectedFileTypeLongName" : "Graphics Interchange Format",
+    "DetectedMIMEType" : "image/gif",
+    "ExpectedFileNameExtension" : "gif"
+  },
+  "GIFHeader" : {
+    "GIFFormatVersion" : "89a",
+    "ImageWidth" : "128",
+    "ImageHeight" : "174",
+    "ColorTableSize" : "256",
+    "IsColorTableSorted" : "false",
+    "BitsPerPixel" : "8",
+    "HasGlobalColorTable" : "true",
+    "BackgroundColorIndex" : "0"
+  },
+  "GIFControl" : {
+    "DisposalMethod" : "Not Specified",
+    "UserInputFlag" : "false",
+    "TransparentColorFlag" : "true",
+    "Delay" : "0",
+    "TransparentColorIndex" : "255"
+  },
+  "GIFImage" : {
+    "Left" : "0",
+    "Top" : "0",
+    "Width" : "128",
+    "Height" : "174",
+    "HasLocalColourTable" : "false",
+    "IsInterlaced" : "false"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/ico.json b/exec/java-exec/src/test/resources/store/image/ico.json
new file mode 100644
index 0000000..27466ad
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/ico.json
@@ -0,0 +1,33 @@
+{
+  "Format" : "ICO",
+  "PixelWidth" : "32",
+  "PixelHeight" : "32",
+  "BitsPerPixel" : "32",
+  "Orientaion" : "Unknown (0)",
+  "DPIWidth" : "0",
+  "DPIHeight" : "0",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "true",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "ICO",
+    "DetectedFileTypeLongName" : "Windows Icon",
+    "DetectedMIMEType" : "image/x-icon",
+    "ExpectedFileNameExtension" : "ico"
+  },
+  "ICO" : {
+    "ImageType" : "Icon",
+    "ImageWidth" : "32 pixels",
+    "ImageHeight" : "32 pixels",
+    "ColourPaletteSize" : "No palette",
+    "ColourPlanes" : "1",
+    "BitsPerPixel" : "32",
+    "ImageSizeBytes" : "4264",
+    "ImageOffsetBytes" : "22"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/jpeg.json b/exec/java-exec/src/test/resources/store/image/jpeg.json
new file mode 100644
index 0000000..2bb357b
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/jpeg.json
@@ -0,0 +1,213 @@
+{
+  "Format" : "JPEG",
+  "DPIWidth" : "300",
+  "DPIHeight" : "300",
+  "PixelWidth" : "600",
+  "PixelHeight" : "400",
+  "BitsPerPixel" : "24",
+  "Orientaion" : "Top, left side (Horizontal / normal)",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "JPEG",
+    "DetectedFileTypeLongName" : "Joint Photographic Experts Group",
+    "DetectedMIMEType" : "image/jpeg",
+    "ExpectedFileNameExtension" : "jpg"
+  },
+  "JFIF" : {
+    "Version" : "1.2",
+    "ResolutionUnits" : "inch",
+    "XResolution" : "300 dots",
+    "YResolution" : "300 dots",
+    "ThumbnailWidthPixels" : "0",
+    "ThumbnailHeightPixels" : "0"
+  },
+  "ExifIFD0" : {
+    "ImageDescription" : "Communications","Make" : "FUJIFILM",
+    "Model" : "FinePixS1Pro",
+    "Orientation" : "Top, left side (Horizontal / normal)",
+    "XResolution" : "300 dots per inch",
+    "YResolution" : "300 dots per inch",
+    "ResolutionUnit" : "Inch",
+    "Software" : "Adobe Photoshop 7.0",
+    "DateTime" : "2002:07:19 13:28:10",
+    "Artist" : "Ian Britton",
+    "YCbCrPositioning" : "Datum point",
+    "ReferenceBlackWhite" : "[0,128,128] [255,255,255]",
+    "Copyright" : "ian Britton - FreeFoto.com"
+  },
+  "ExifSubIFD" : {
+    "FNumber" : "f/0.6",
+    "ExposureProgram" : "Shutter priority",
+    "ISOSpeedRatings" : "0",
+    "ExifVersion" : "2.00",
+    "DateTimeOriginal" : "2002:07:13 15:58:28",
+    "DateTimeDigitized" : "2002:07:13 15:58:28",
+    "ComponentsConfiguration" : "YCbCr",
+    "ShutterSpeedValue" : "1/724 sec",
+    "ApertureValue" : "f/16.0",
+    "BrightnessValue" : "333/1280",
+    "ExposureBiasValue" : "-1090519041/1677721600 EV",
+    "MeteringMode" : "Multi-segment",
+    "Flash" : "Flash did not fire",
+    "FocalLength" : "0 mm",
+    "FlashPixVersion" : "1.00",
+    "ColorSpace" : "sRGB",
+    "ExifImageWidth" : "2400 pixels",
+    "ExifImageHeight" : "1600 pixels",
+    "FocalPlaneXResolution" : "256/3085 inches",
+    "FocalPlaneYResolution" : "256/3085 inches",
+    "FocalPlaneResolutionUnit" : "Inches",
+    "SensingMethod" : "One-chip color area sensor",
+    "FileSource" : "Unknown (0)",
+    "SceneType" : "Unknown (0)"
+  },
+  "GPS" : {
+    "GPSVersionID" : "2.000",
+    "GPSLatitudeRef" : "N",
+    "GPSLatitude" : "54° 59' 22.8\"",
+    "GPSLongitudeRef" : "W",
+    "GPSLongitude" : "-1° 54' 51\"",
+    "GPSTimeStamp" : "14:58:24.000 UTC",
+    "GPSMapDatum" : "WGS84"
+  },
+  "ExifThumbnail" : {
+    "Compression" : "JPEG (old-style)",
+    "XResolution" : "72 dots per inch",
+    "YResolution" : "72 dots per inch",
+    "ResolutionUnit" : "Inch",
+    "ThumbnailOffset" : "1038 bytes",
+    "ThumbnailLength" : "3662 bytes"
+  },
+  "XMP" : {
+    "XMPValueCount" : "33",
+    "Photoshop" : {
+      "AuthorsPosition" : "Photographer",
+      "Urgency" : "5",
+      "SupplementalCategories" : ["Communications"],
+      "DateCreated" : "2002-06-20",
+      "Credit" : "Ian Britton",
+      "CaptionWriter" : "Ian Britton",
+      "City" : " ",
+      "Headline" : "Communications",
+      "State" : " ",
+      "Source" : "FreeFoto.com",
+      "Category" : "BUS",
+      "Country" : "Ubited Kingdom"
+    },
+    "Dc" : {
+      "Creator" : ["Ian Britton"],
+      "Description" : "Communications",
+      "Rights" : "ian Britton - FreeFoto.com",
+      "Title" : "Communications",
+      "Subject" : ["Communications"]
+    },
+    "XmpMM" : {
+      "DocumentID" : "adobe:docid:photoshop:84d4dba8-9b11-11d6-895d-c4d063a70fb0",
+      "InstanceID" : "uuid:3ff5d382-9b12-11d6-895d-c4d063a70fb0"
+    },
+    "XmpBJ" : {
+      "JobRef" : [{
+        "Name" : "Photographer"
+      }]
+    },
+    "XmpRights" : {
+      "Marked" : "True",
+      "WebStatement" : "www.freefoto.com"
+    }
+  },
+  "ICCProfile": {
+    "ProfileSize" : "3144",
+    "CMMType" : "Lino",
+    "Version" : "2.1.0",
+    "Class" : "Display Device","ColorSpace" : "RGB ",
+    "ProfileConnectionSpace" : "XYZ ",
+    "ProfileDateTime" : "1998:02:09 06:49:00",
+    "Signature" : "acsp",
+    "PrimaryPlatform" : "Microsoft Corporation",
+    "DeviceManufacturer" : "IEC ",
+    "DeviceModel" : "sRGB",
+    "XYZValues" : "0.964 1 0.825",
+    "TagCount" : "17",
+    "Copyright" : "Copyright (c) 1998 Hewlett-Packard Company",
+    "ProfileDescription" : "sRGB IEC61966-2.1",
+    "MediaWhitePoint" : "(0.9505, 1, 1.0891)",
+    "MediaBlackPoint" : "(0, 0, 0)",
+    "RedColorant" : "(0.4361, 0.2225, 0.0139)",
+    "GreenColorant" : "(0.3851, 0.7169, 0.0971)",
+    "BlueColorant" : "(0.1431, 0.0606, 0.7141)",
+    "DeviceMfgDescription" : "IEC http://www.iec.ch",
+    "DeviceModelDescription" : "IEC 61966-2.1 Default RGB colour space - sRGB",
+    "ViewingConditionsDescription" : "Reference Viewing Condition in IEC61966-2.1",
+    "ViewingConditions" : "view (0x76696577): 36 bytes",
+    "Luminance" : "(76.0365, 80, 87.1246)",
+    "Measurement" : "1931 2° Observer, Backing (0, 0, 0), Geometry Unknown, Flare 1%, Illuminant D65",
+    "Technology" : "CRT ",
+    "RedTRC" : "0.0, 0.0000763, 0.0001526, 0.0002289, 0.0003052, 0.0003815, 0.0004578, 0.0005341, 0.0006104, 0.0006867, 0.000763, 0.0008392, 0.0009003, 0.0009766, 0.0010529, 0.0011292, 0.0012055, 0.0012818, 0.0013581, 0.0014343, 0.0015106, 0.0015869, 0.0016632, 0.0017395, 0.0018158, 0.0018921, 0.0019684, 0.0020447, 0.002121, 0.0021973, 0.0022736, 0.0023499, 0.0024262, 0.0025025, 0.0025788, 0.0026551, 0.0027161, 0.0027924, 0.0028687, 0.002945, 0.0030213, 0.0030976, 0.0031739, 0.0032502, 0 [...]
+    "GreenTRC" : "0.0, 0.0000763, 0.0001526, 0.0002289, 0.0003052, 0.0003815, 0.0004578, 0.0005341, 0.0006104, 0.0006867, 0.000763, 0.0008392, 0.0009003, 0.0009766, 0.0010529, 0.0011292, 0.0012055, 0.0012818, 0.0013581, 0.0014343, 0.0015106, 0.0015869, 0.0016632, 0.0017395, 0.0018158, 0.0018921, 0.0019684, 0.0020447, 0.002121, 0.0021973, 0.0022736, 0.0023499, 0.0024262, 0.0025025, 0.0025788, 0.0026551, 0.0027161, 0.0027924, 0.0028687, 0.002945, 0.0030213, 0.0030976, 0.0031739, 0.0032502, [...]
+    "BlueTRC" : "0.0, 0.0000763, 0.0001526, 0.0002289, 0.0003052, 0.0003815, 0.0004578, 0.0005341, 0.0006104, 0.0006867, 0.000763, 0.0008392, 0.0009003, 0.0009766, 0.0010529, 0.0011292, 0.0012055, 0.0012818, 0.0013581, 0.0014343, 0.0015106, 0.0015869, 0.0016632, 0.0017395, 0.0018158, 0.0018921, 0.0019684, 0.0020447, 0.002121, 0.0021973, 0.0022736, 0.0023499, 0.0024262, 0.0025025, 0.0025788, 0.0026551, 0.0027161, 0.0027924, 0.0028687, 0.002945, 0.0030213, 0.0030976, 0.0031739, 0.0032502,  [...]
+  },
+  "JPEG" : {
+    "CompressionType" : "Baseline",
+    "DataPrecision" : "8 bits",
+    "ImageHeight" : "400 pixels",
+    "ImageWidth" : "600 pixels",
+    "NumberOfComponents" : "3",
+    "Component1" : "Y component: Quantization table 0, Sampling factors 2 horiz/2 vert",
+    "Component2" : "Cb component: Quantization table 1, Sampling factors 1 horiz/1 vert",
+    "Component3" : "Cr component: Quantization table 1, Sampling factors 1 horiz/1 vert"
+  },
+  "IPTC" : {
+    "ApplicationRecordVersion" : "2",
+    "CaptionAbstract" : "Communications",
+    "CaptionWriterEditor" : "Ian Britton",
+    "Headline" : "Communications",
+    "ByLine" : "Ian Britton",
+    "ByLineTitle" : "Photographer",
+    "Credit" : "Ian Britton",
+    "Source" : "FreeFoto.com",
+    "ObjectName" : "Communications",
+    "DateCreated" : "2002:06:20",
+    "City" : " ",
+    "ProvinceState" : " ",
+    "CountryPrimaryLocationName" : "Ubited Kingdom",
+    "Category" : "BUS",
+    "SupplementalCategory(s)" : "Communications",
+    "Urgency" : "53",
+    "Keywords" : "Communications",
+    "CopyrightNotice" : "ian Britton - FreeFoto.com"
+  },
+  "Photoshop" : {
+    "CaptionDigest" : "245 138 68 109 96 203 177 136 63 66 1 237 68 32 172 54",
+    "ResolutionInfo" : "300x300 DPI",
+    "PrintScale" : "Centered, Scale 1.0",
+    "GlobalAngle" : "30","GlobalAltitude" : "30",
+    "PrintFlags" : "0 0 0 0 0 0 0 0 1",
+    "CopyrightFlag" : "Yes","URL" : "www.freefoto.com",
+    "PrintFlagsInformation" : "0 1 0 0 0 0 0 0 0 2",
+    "ColorHalftoningInformation" : "[72 values]",
+    "ColorTransferFunctions" : "[112 values]",
+    "GridAndGuidesInformation" : "0 0 0 1 0 0 2 64 0 0 2 64 0 0 0 0",
+    "URLList" : "0",
+    "Slices" : "04_02_10_a5 (0,0,1600,2400) 1 Slices",
+    "SeedNumber" : "1",
+    "ThumbnailData" : "JpegRGB, 128x85, Decomp 32640 bytes, 1572865 bpp, 3662 bytes",
+    "VersionInfo" : "1 (Adobe Photoshop, Adobe Photoshop 7.0) 1",
+    "JPEGQuality" : "9 (High), Standard format, 3 scans"
+  },
+  "AdobeJPEG": {
+    "DCTEncodeVersion" : "25600",
+    "Flags0" : "64",
+    "Flags1" : "0",
+    "ColorTransform" : "YCbCr"
+  },
+  "Huffman" : {
+    "NumberOfTables" : "4 Huffman tables"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/mov.json b/exec/java-exec/src/test/resources/store/image/mov.json
new file mode 100644
index 0000000..bf174ca
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/mov.json
@@ -0,0 +1,67 @@
+{
+  "Format" : "MOV",
+  "Orientaion" : "Unknown (0)",
+  "DPIWidth" : "72",
+  "DPIHeight" : "72",
+  "PixelWidth" : "560",
+  "PixelHeight" : "320",
+  "BitsPerPixel" : "0",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:05",
+  "VideoCodec" : "MPEG-4",
+  "FrameRate" : "0",
+  "AudioCodec" : "MPEG-4, Advanced Audio Coding (AAC)",
+  "AudioSampleSize" : "16",
+  "AudioSampleRate" : "44100",
+  "FileType" : {
+    "DetectedFileTypeName" : "MOV",
+    "DetectedFileTypeLongName" : "QuickTime Movie",
+    "DetectedMIMEType" : "video/quicktime",
+    "ExpectedFileNameExtension" : "mov"
+  },
+  "QuickTime" : {
+    "MajorBrand" : "Apple QuickTime (.MOV/QT)",
+    "MinorVersion" : "512",
+    "CompatibleBrands" : "[Apple QuickTime (.MOV/QT)]",
+    "CreationTime" : "Fri Jan 01 00:00:00 +00:00 1904",
+    "ModificationTime" : "Fri Jan 01 00:00:00 +00:00 1904",
+    "Duration" : "00:00:05",
+    "MediaTimeScale" : "1000",
+    "PreferredRate" : "1",
+    "PreferredVolume" : "1",
+    "PreviewTime" : "0",
+    "PreviewDuration" : "0",
+    "PosterTime" : "0",
+    "SelectionTime" : "0",
+    "SelectionDuration" : "0",
+    "CurrentTime" : "0",
+    "NextTrackID" : "3"
+  },
+  "QuickTimeVideo" : {
+    "CreationTime" : "Fri Jan 01 00:00:00 UTC 1904",
+    "ModificationTime" : "Fri Jan 01 00:00:00 UTC 1904",
+    "Opcolor" : "0 0 0",
+    "GraphicsMode" : "Copy",
+    "Vendor" : "FFmpeg",
+    "CompressionType" : "MPEG-4",
+    "TemporalQuality" : "512",
+    "SpatialQuality" : "512",
+    "Width" : "560 pixels",
+    "Height" : "320 pixels",
+    "CompressorName" : "mpeg4",
+    "Depth" : "Unknown (0)",
+    "ColorTable" : "Color table within file",
+    "HorizontalResolution" : "72",
+    "VerticalResolution" : "72"
+  },
+  "QuickTimeSound" : {
+    "CreationTime" : "Fri Jan 01 00:00:00 UTC 1904",
+    "ModificationTime" : "Fri Jan 01 00:00:00 UTC 1904",
+    "Balance" : "0",
+    "Format" : "MPEG-4, Advanced Audio Coding (AAC)",
+    "NumberOfChannels" : "1",
+    "SampleSize" : "16",
+    "SampleRate" : "44100"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/mp4.json b/exec/java-exec/src/test/resources/store/image/mp4.json
new file mode 100644
index 0000000..1e581c3
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/mp4.json
@@ -0,0 +1,56 @@
+{
+  "Format" : "MP4",
+  "Orientaion" : "Unknown (0)",
+  "DPIWidth" : "72",
+  "DPIHeight" : "72",
+  "PixelWidth" : "560",
+  "PixelHeight" : "320",
+  "BitsPerPixel" : "24",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:05",
+  "VideoCodec" : "JVT/AVC Coding",
+  "FrameRate" : "30",
+  "AudioCodec" : "Sat Mar 20 21:29:11 UTC 2010",
+  "AudioSampleSize" : "16",
+  "AudioSampleRate" : "48000",
+  "FileType" : {
+    "DetectedFileTypeName" : "MP4",
+    "DetectedFileTypeLongName" : "MPEG-4 Part 14",
+    "DetectedMIMEType" : "video/mp4",
+    "ExpectedFileNameExtension" : "mp4"
+  },
+  "MP4" : {
+    "MajorBrand" : "MP4 v2 [ISO 14496-14]",
+    "MinorVersion" : "0",
+    "CompatibleBrands" : "[MP4 v2 [ISO 14496-14], MP4  Base Media v1 [IS0 14496-12:2003], MP4 Base w/ AVC ext [ISO 14496-12:2005]]",
+    "CreationTime" : "Sat Mar 20 21:29:11 UTC 2010",
+    "ModificationTime" : "Sat Mar 20 21:29:12 UTC 2010",
+    "Duration" : "00:00:05",
+    "MediaTimeScale" : "90000",
+    "TransformationMatrix" : "65536 0 0 0 65536 0 0 0 1073741824",
+    "PreferredRate" : "1",
+    "PreferredVolume" : "1",
+    "NextTrackID" : "3"
+  },
+  "MP4Video" : {
+    "Vendor" : "Sat Mar 20 21:29:11 UTC 2010",
+    "TemporalQuality" : "Sat Mar 20 21:29:12 UTC 2010",
+    "Width" : "560 pixels",
+    "Opcolor" : "0 0 0",
+    "GraphicsMode" : "Copy",
+    "Height" : "320 pixels",
+    "CompressionType" : "JVT/AVC Coding",
+    "Depth" : "Unknown (24)",
+    "HorizontalResolution" : "72",
+    "VerticalResolution" : "72",
+    "FrameRate" : "30"
+  },
+  "MP4Sound" : {
+    "Format" : "Sat Mar 20 21:29:11 UTC 2010",
+    "NumberOfChannels" : "1",
+    "SampleRate" : "48000",
+    "Balance" : "0",
+    "SampleSize" : "16"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/pcx.json b/exec/java-exec/src/test/resources/store/image/pcx.json
new file mode 100644
index 0000000..92d4816
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/pcx.json
@@ -0,0 +1,37 @@
+{
+  "Format" : "PCX",
+  "PixelWidth" : "128",
+  "PixelHeight" : "174",
+  "DPIWidth" : "72",
+  "DPIHeight" : "72",
+  "BitsPerPixel" : "24",
+  "HasAlpha" : "false",
+  "Orientaion" : "Unknown (0)",
+  "ColorMode" : "RGB",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "PCX",
+    "DetectedFileTypeLongName" : "PiCture eXchange",
+    "DetectedMIMEType" : "image/x-pcx",
+    "ExpectedFileNameExtension" : "pcx"
+  },
+  "PCX" : {
+    "Version" : "3.0 or better",
+    "BitsPerPixel" : "8",
+    "XMin" : "0",
+    "YMin" : "0",
+    "XMax" : "127",
+    "YMax" : "173",
+    "HorizontalDPI" : "72",
+    "VerticalDPI" : "72",
+    "Palette" : "[48 values]",
+    "ColorPlanes" : "24-bit color",
+    "BytesPerLine" : "128",
+    "PaletteType" : "Color or B&W"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/png.json b/exec/java-exec/src/test/resources/store/image/png.json
new file mode 100644
index 0000000..a883d5b
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/png.json
@@ -0,0 +1,57 @@
+{
+  "Format" : "PNG",
+  "PixelWidth" : "128",
+  "PixelHeight" : "174",
+  "HasAlpha" : "true",
+  "BitsPerPixel" : "32",
+  "DPIWidth" : "72.009",
+  "DPIHeight" : "72.009",
+  "Orientaion" : "Unknown (0)",
+  "ColorMode" : "RGB",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "PNG",
+    "DetectedFileTypeLongName" : "Portable Network Graphics",
+    "DetectedMIMEType" : "image/png",
+    "ExpectedFileNameExtension" : "png"
+  },
+  "PNGIHDR" : {
+    "ImageWidth" : "128",
+    "ImageHeight" : "174",
+    "BitsPerSample" : "8",
+    "ColorType" : "True Color with Alpha",
+    "CompressionType" : "Deflate",
+    "FilterMethod" : "Adaptive",
+    "InterlaceMethod" : "No Interlace"
+  },
+  "PNGTEXt" : {
+    "TextualData" : [
+      "date:create: 2015-06-22T09:06:26-04:00",
+      "date:modify: 2015-06-22T09:06:26-04:00"
+    ]
+  },
+  "PNGSRGB" : {
+    "SRGBRenderingIntent" : "Perceptual"
+  },
+  "PNGPHYs" : {
+    "PixelsPerUnitX" : "2835",
+    "PixelsPerUnitY" : "2835",
+    "UnitSpecifier" : "Metres"
+  },
+  "PNGChromaticities" : {
+    "WhitePointX" : "31269",
+    "WhitePointY" : "32899",
+    "RedX" : "63999",
+    "RedY" : "33001",
+    "GreenX" : "30000",
+    "GreenY" : "60000",
+    "BlueX" : "15000",
+    "BlueY" : "5999"
+  },
+  "PNGBKGD" : { }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/psd.json b/exec/java-exec/src/test/resources/store/image/psd.json
new file mode 100644
index 0000000..a8d646a
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/psd.json
@@ -0,0 +1,119 @@
+{
+  "Format" : "PSD",
+  "Orientaion" : "Top, left side (Horizontal / normal)",
+  "DPIWidth" : "72",
+  "DPIHeight" : "72",
+  "PixelWidth" : "128",
+  "PixelHeight" : "174",
+  "BitsPerPixel" : "32",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "PSD",
+    "DetectedFileTypeLongName" : "Photoshop Document",
+    "DetectedMIMEType" : "image/vnd.adobe.photoshop",
+    "ExpectedFileNameExtension" : "psd"
+  },
+  "ExifIFD0" : {
+    "Orientation" : "Top, left side (Horizontal / normal)",
+    "XResolution" : "72009/1000 dots per inch",
+    "YResolution" : "72009/1000 dots per inch",
+    "ResolutionUnit" : "Inch",
+    "Software" : "Adobe Photoshop CS2 Windows",
+    "DateTime" : "2016:02:06 00:08:57"
+  },
+  "ExifSubIFD" : {
+    "ColorSpace" : "Undefined",
+    "ExifImageWidth" : "128 pixels",
+    "ExifImageHeight" : "174 pixels"
+  },
+  "ExifThumbnail" : {
+    "Compression" : "JPEG (old-style)",
+    "XResolution" : "72 dots per inch",
+    "YResolution" : "72 dots per inch",
+    "ResolutionUnit" : "Inch",
+    "ThumbnailOffset" : "302 bytes",
+    "ThumbnailLength" : "0 bytes"
+  },
+  "XMP" : {
+    "XMPValueCount" : "22",
+    "Xmp" : {
+      "ModifyDate" : "2016-02-06T00:08:57+09:00",
+      "MetadataDate" : "2016-02-06T00:08:57+09:00",
+      "CreatorTool" : "Adobe Photoshop CS2 Windows",
+      "CreateDate" : "2016-02-06T00:08:57+09:00"
+    },
+    "Photoshop" : {
+      "History" : "",
+      "ICCProfile" : "sRGB IEC61966-2.1",
+      "ColorMode" : "3"
+    },
+    "Tiff" : {
+      "XResolution" : "720090/10000",
+      "NativeDigest" : "256,257,258,259,262,274,277,284,530,531,282,283,296,301,318,319,529,532,306,270,271,272,305,315,33432;F3A2BBED3F60568C7329EB637603055D",
+      "ResolutionUnit" : "2",
+      "Orientation" : "1",
+      "YResolution" : "720090/10000"
+    },
+    "XmpMM" : {
+      "DerivedFrom": {
+        "DocumentID" : "uuid:365756FF19CCE511BCD0B4FE57F853AF",
+        "InstanceID" : "uuid:365756FF19CCE511BCD0B4FE57F853AF"
+      },
+      "DocumentID" : "uuid:375756FF19CCE511BCD0B4FE57F853AF",
+      "InstanceID" : "uuid:385756FF19CCE511BCD0B4FE57F853AF"
+    },
+    "Exif" : {
+      "PixelYDimension" : "174",
+      "ColorSpace" : "-1",
+      "PixelXDimension" : "128",
+      "NativeDigest" : "36864,40960,40961,37121,37122,40962,40963,37510,40964,36867,36868,33434,33437,34850,34852,34855,34856,37377,37378,37379,37380,37381,37382,37383,37384,37385,37386,37396,41483,41484,41486,41487,41488,41492,41493,41495,41728,41729,41730,41985,41986,41987,41988,41989,41990,41991,41992,41993,41994,41995,41996,42016,0,2,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,20,22,23,24,25,26,27,28,30;07837EFC5AF54CDBA4998B26FEFAF26C"
+    },
+    "Dc" : {
+      "Format" : "application/vnd.adobe.photoshop"
+    }
+  },
+  "Photoshop" : {
+    "CaptionDigest" : "0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0",
+    "ResolutionInfo" : "72.01x72.01 DPI",
+    "PrintScale" : "Centered, Scale 1.0",
+    "AlphaChannels" : "8 147 167 150 190 149 148 149 170",
+    "UnicodeAlphaNames" : "0 0 0 5 144 15 102 14 144 232 82 6 0 0",
+    "DisplayInfo(Obsolete)" : "0 0 255 255 0 0 0 0 0 0 0 100 1 0",
+    "AlphaIdentifiers" : "0 0 0 0",
+    "GlobalAngle" : "30",
+    "GlobalAltitude" : "30",
+    "PrintFlags" : "0 0 0 0 0 0 0 0 1",
+    "CopyrightFlag" : "No",
+    "PrintFlagsInformation" : "0 1 0 0 0 0 0 0 0 2",
+    "ColorHalftoningInformation" : "[72 values]",
+    "ColorTransferFunctions" : "[112 values]",
+    "LayerStateInformation" : "0 0",
+    "LayersGroupInformation" : "0 0",
+    "LayerGroupsEnabledID" : "1",
+    "LayerSelectionIDs" : "0 1 0 0 0 3",
+    "GridAndGuidesInformation" : "0 0 0 1 0 0 2 64 0 0 2 64 0 0 0 0",
+    "URLList" : "0",
+    "Slices" : "rose-128x174-alpha (0,0,174,128) 1 Slices",
+    "PixelAspectRatio" : "1.0",
+    "ICCUntaggedProfile" : "1",
+    "SeedNumber" : "3",
+    "ThumbnailData" : "JpegRGB, 118x160, Decomp 56960 bytes, 1572865 bpp, 3786 bytes",
+    "VersionInfo" : "1 (Adobe Photoshop, Adobe Photoshop CS2) 1",
+    "PlugIn1Data" : "[268 values]",
+    "PlugIn2Data" : "[28 values]"
+  },
+  "PSDHeader" : {
+    "ChannelCount" : "4 channels",
+    "ImageHeight" : "174 pixels",
+    "ImageWidth" : "128 pixels",
+    "BitsPerChannel" : "8 bits per channel",
+    "ColorMode" : "RGB"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit-lzw.tiff b/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit-lzw.tiff
new file mode 100644
index 0000000..79530d3
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit-lzw.tiff differ
diff --git a/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit.bmp b/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit.bmp
new file mode 100644
index 0000000..50fec90
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit.bmp differ
diff --git a/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit.pcx b/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit.pcx
new file mode 100644
index 0000000..3ace47d
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/rose-128x174-24bit.pcx differ
diff --git a/exec/java-exec/src/test/resources/store/image/rose-128x174-32bit-alpha.png b/exec/java-exec/src/test/resources/store/image/rose-128x174-32bit-alpha.png
new file mode 100644
index 0000000..8606080
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/rose-128x174-32bit-alpha.png differ
diff --git a/exec/java-exec/src/test/resources/store/image/rose-128x174-32bit-alpha.psd b/exec/java-exec/src/test/resources/store/image/rose-128x174-32bit-alpha.psd
new file mode 100644
index 0000000..77643e2
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/rose-128x174-32bit-alpha.psd differ
diff --git a/exec/java-exec/src/test/resources/store/image/rose-128x174-8bit-alpha.gif b/exec/java-exec/src/test/resources/store/image/rose-128x174-8bit-alpha.gif
new file mode 100644
index 0000000..61550b2
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/rose-128x174-8bit-alpha.gif differ
diff --git a/exec/java-exec/src/test/resources/store/image/rose-32x32-32bit-alpha.ico b/exec/java-exec/src/test/resources/store/image/rose-32x32-32bit-alpha.ico
new file mode 100644
index 0000000..027a276
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/rose-32x32-32bit-alpha.ico differ
diff --git a/exec/java-exec/src/test/resources/store/image/sample.avi b/exec/java-exec/src/test/resources/store/image/sample.avi
new file mode 100644
index 0000000..850feab
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/sample.avi differ
diff --git a/exec/java-exec/src/test/resources/store/image/sample.mov b/exec/java-exec/src/test/resources/store/image/sample.mov
new file mode 100644
index 0000000..5e9d178
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/sample.mov differ
diff --git a/exec/java-exec/src/test/resources/store/image/sample.mp4 b/exec/java-exec/src/test/resources/store/image/sample.mp4
new file mode 100644
index 0000000..1fc4788
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/sample.mp4 differ
diff --git a/exec/java-exec/src/test/resources/store/image/sample.wav b/exec/java-exec/src/test/resources/store/image/sample.wav
new file mode 100644
index 0000000..d71512a
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/sample.wav differ
diff --git a/exec/java-exec/src/test/resources/store/image/tiff.json b/exec/java-exec/src/test/resources/store/image/tiff.json
new file mode 100644
index 0000000..9c26a72
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/tiff.json
@@ -0,0 +1,87 @@
+{
+  "Format" : "ARW",
+  "PixelWidth" : "128",
+  "PixelHeight" : "174",
+  "Orientaion" : "Top, left side (Horizontal / normal)",
+  "DPIWidth" : "72",
+  "DPIHeight" : "72",
+  "BitsPerPixel" : "24",
+  "ColorMode" : "RGB",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "ARW",
+    "DetectedFileTypeLongName" : "Sony Camera Raw",
+    "ExpectedFileNameExtension" : "arw"
+  },
+  "ExifIFD0" : {
+    "NewSubfileType" : "Full-resolution image",
+    "ImageWidth" : "128 pixels",
+    "ImageHeight" : "174 pixels",
+    "BitsPerSample" : "8 8 8 bits/component/pixel",
+    "Compression" : "LZW",
+    "PhotometricInterpretation" : "RGB",
+    "StripOffsets" : "23876",
+    "Orientation" : "Top, left side (Horizontal / normal)",
+    "SamplesPerPixel" : "3 samples/pixel",
+    "RowsPerStrip" : "174 rows/strip",
+    "StripByteCounts" : "26556 bytes",
+    "XResolution" : "72009/1000 dots per inch",
+    "YResolution" : "72009/1000 dots per inch",
+    "PlanarConfiguration" : "Chunky (contiguous for each subsampling pixel)",
+    "ResolutionUnit" : "Inch",
+    "Software" : "Adobe Photoshop CS2 Windows",
+    "DateTime" : "2016:02:05 01:25:42",
+    "Predictor" : "2",
+    "UnknownTag(0x8649)" : "[5390 values]",
+    "InterColorProfile" : "[3144 values]"
+  },
+  "ExifSubIFD" : {
+    "ColorSpace" : "sRGB",
+    "ExifImageWidth" : "128 pixels",
+    "ExifImageHeight" : "174 pixels"
+  },
+  "XMP" : {
+    "XMPValueCount" : "22",
+    "Xmp" : {
+      "ModifyDate" : "2016-02-05T01:25:42+09:00",
+      "MetadataDate" : "2016-02-05T01:25:42+09:00",
+      "CreatorTool" : "Adobe Photoshop CS2 Windows",
+      "CreateDate" : "2016-02-05T01:25:42+09:00"
+    },
+    "Photoshop" : {
+      "History" : "",
+      "ICCProfile" : "sRGB IEC61966-2.1",
+      "ColorMode" : "3"
+    },
+    "Tiff" : {
+      "XResolution" : "720090/10000",
+      "NativeDigest" : "256,257,258,259,262,274,277,284,530,531,282,283,296,301,318,319,529,532,306,270,271,272,305,315,33432;6A3819C79FDE56A3CEB49BE0CECF0E4B",
+      "ResolutionUnit" : "2",
+      "Orientation" : "1",
+      "YResolution" : "720090/10000"
+    },
+    "XmpMM" : {
+      "DerivedFrom" : {
+        "DocumentID" : "uuid:755EFE1B5BCBE51191D2BA1A4A34CC1F",
+        "InstanceID" : "uuid:765EFE1B5BCBE51191D2BA1A4A34CC1F"
+      },
+      "DocumentID" : "uuid:785EFE1B5BCBE51191D2BA1A4A34CC1F",
+      "InstanceID" : "uuid:795EFE1B5BCBE51191D2BA1A4A34CC1F"
+    },
+    "Exif" : {
+      "PixelYDimension" : "174",
+      "ColorSpace" : "1",
+      "PixelXDimension" : "128",
+      "NativeDigest" : "36864,40960,40961,37121,37122,40962,40963,37510,40964,36867,36868,33434,33437,34850,34852,34855,34856,37377,37378,37379,37380,37381,37382,37383,37384,37385,37386,37396,41483,41484,41486,41487,41488,41492,41493,41495,41728,41729,41730,41985,41986,41987,41988,41989,41990,41991,41992,41993,41994,41995,41996,42016,0,2,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,20,22,23,24,25,26,27,28,30;EB685DEADF67388F7E939885A41C0ECF"
+    },
+    "Dc" : {
+      "Format" : "image/tiff"
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/wav.json b/exec/java-exec/src/test/resources/store/image/wav.json
new file mode 100644
index 0000000..0823442
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/wav.json
@@ -0,0 +1,32 @@
+{
+  "Format" : "WAV",
+  "Orientaion" : "Unknown (0)",
+  "DPIWidth" : "0",
+  "DPIHeight" : "0",
+  "PixelWidth" : "0",
+  "PixelHeight" : "0",
+  "BitsPerPixel" : "0",
+  "ColorMode" : "N/A",
+  "HasAlpha" : "false",
+  "Duration" : "00:00:03",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Microsoft PCM",
+  "AudioSampleSize" : "8",
+  "AudioSampleRate" : "11025",
+  "FileType" : {
+    "DetectedFileTypeName" : "WAV",
+    "DetectedFileTypeLongName" : "Waveform Audio File Format",
+    "DetectedMIMEType" : "audio/vnd.wave",
+    "ExpectedFileNameExtension" : "wav"
+  },
+  "WAV" : {
+    "BitsPerSample" : "8",
+    "Format" : "Microsoft PCM",
+    "Channels" : "1",
+    "SamplesPerSecond" : "11025",
+    "BytesPerSecond" : "11025",
+    "BlockAlignment" : "1",
+    "Duration" : "00:00:03"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/webp.json b/exec/java-exec/src/test/resources/store/image/webp.json
new file mode 100644
index 0000000..e602506
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/image/webp.json
@@ -0,0 +1,29 @@
+{
+  "Format" : "WEBP",
+  "PixelWidth" : "400",
+  "PixelHeight" : "301",
+  "HasAlpha" : "true",
+  "Orientaion" : "Unknown (0)",
+  "DPIWidth" : "0",
+  "DPIHeight" : "0",
+  "ColorMode" : "RGB",
+  "BitsPerPixel" : "0",
+  "Duration" : "00:00:00",
+  "VideoCodec" : "Unknown",
+  "FrameRate" : "0",
+  "AudioCodec" : "Unknown",
+  "AudioSampleSize" : "0",
+  "AudioSampleRate" : "0",
+  "FileType" : {
+    "DetectedFileTypeName" : "WebP",
+    "DetectedFileTypeLongName" : "WebP",
+    "DetectedMIMEType" : "image/webp",
+    "ExpectedFileNameExtension" : "webp"
+  },
+  "WebP" : {
+    "ImageWidth" : "400",
+    "ImageHeight" : "301",
+    "HasAlpha" : "true",
+    "IsAnimation" : "false"
+  }
+}
diff --git a/exec/java-exec/src/test/resources/store/image/withExifAndIptc.jpg b/exec/java-exec/src/test/resources/store/image/withExifAndIptc.jpg
new file mode 100644
index 0000000..c9e425d
Binary files /dev/null and b/exec/java-exec/src/test/resources/store/image/withExifAndIptc.jpg differ
diff --git a/exec/jdbc-all/pom.xml b/exec/jdbc-all/pom.xml
index 4345c39..d8423de 100644
--- a/exec/jdbc-all/pom.xml
+++ b/exec/jdbc-all/pom.xml
@@ -172,6 +172,10 @@
           <groupId>commons-validator</groupId>
           <artifactId>commons-validator</artifactId>
         </exclusion>
+        <exclusion>
+          <artifactId>metadata-extractor</artifactId>
+          <groupId>com.drewnoakes</groupId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 03/10: DRILL-6343: bit vector copyFromSafe is not doing realloc

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 5ef220e70fb898500110eabdbbe1b0bf6401049b
Author: Padma Penumarthy <pp...@yahoo.com>
AuthorDate: Thu Apr 19 14:36:38 2018 -0700

    DRILL-6343: bit vector copyFromSafe is not doing realloc
---
 .../src/main/java/org/apache/drill/exec/vector/BitVector.java     | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/BitVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/BitVector.java
index 2473556..3725364 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/BitVector.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/BitVector.java
@@ -220,13 +220,11 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
     this.mutator.set(outIndex, from.accessor.get(inIndex));
   }
 
-  public boolean copyFromSafe(int inIndex, int outIndex, BitVector from) {
-    if (outIndex >= this.getValueCapacity()) {
-      decrementAllocationMonitor();
-      return false;
+  public void copyFromSafe(int inIndex, int outIndex, BitVector from) {
+    while (outIndex >= this.getValueCapacity()) {
+      reAlloc();
     }
     copyFrom(inIndex, outIndex, from);
-    return true;
   }
 
   @Override

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 05/10: DRILL-6445: Fix existing test cases in TestScripts.java and add new test case for DRILLBIT_CONTEXT variable

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit f48894c7294b70d17ebd92d4c92cd5e60821beeb
Author: Sorabh Hamirwasia <sh...@maprtech.com>
AuthorDate: Thu May 24 11:58:01 2018 -0700

    DRILL-6445: Fix existing test cases in TestScripts.java and add new test case for DRILLBIT_CONTEXT variable
    
    This closes #1289
---
 .../org/apache/drill/yarn/scripts/ScriptUtils.java |  98 +++++++++++--
 .../org/apache/drill/yarn/scripts/TestScripts.java | 161 ++++++++++++++++-----
 2 files changed, 208 insertions(+), 51 deletions(-)

diff --git a/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/ScriptUtils.java b/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/ScriptUtils.java
index 3517cf8..8a909a5 100644
--- a/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/ScriptUtils.java
+++ b/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/ScriptUtils.java
@@ -38,6 +38,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
@@ -66,7 +67,6 @@ public class ScriptUtils {
     String args[] = {
       "-Dlog.path=/.*/drill/log/sqlline\\.log",
       "-Dlog.query.path=/.*/drill/log/sqlline_queries\\.json",
-      "-XX:MaxPermSize=512M",
       "sqlline\\.SqlLine",
       "-d",
       "org\\.apache\\.drill\\.jdbc\\.Driver",
@@ -123,11 +123,10 @@ public class ScriptUtils {
       "-Xms4G",
       "-Xmx4G",
       "-XX:MaxDirectMemorySize=8G",
-      "-XX:MaxPermSize=512M",
       "-XX:ReservedCodeCacheSize=1G",
-      // Removed in Drill 1.8
-//      "-Ddrill\\.exec\\.enable-epoll=true",
-      "-XX:\\+CMSClassUnloadingEnabled",
+      "-Ddrill\\.exec\\.enable-epoll=false",
+      // Removed in Drill 1.14
+      //"-XX:\\+CMSClassUnloadingEnabled",
       "-XX:\\+UseG1GC",
       "org\\.apache\\.drill\\.exec\\.server\\.Drillbit",
       "-Dlog\\.path=/.*/script-test/drill/log/drillbit\\.log",
@@ -197,7 +196,8 @@ public class ScriptUtils {
       "sqlline",
       //sqlline.bat
       //submit_plan
-      "yarn-drillbit.sh"
+      "yarn-drillbit.sh",
+      "auto-setup.sh"
   };
 
   /**
@@ -286,24 +286,89 @@ public class ScriptUtils {
     }
   }
 
+  public void writeEnvFile(PrintWriter out, String key, String value, boolean overrideValue) {
+    out.print("export ");
+    out.print(key);
+    out.print("=");
+
+    if (!overrideValue) {
+      out.print("${");
+      out.print(key);
+      out.print(":-");
+    }
+    out.print("\"");
+    out.print(value);
+    out.print("\"");
+
+    if (!overrideValue) {
+      out.print("}");
+    }
+    out.println();
+  }
+
   /**
    * Create a drill-env.sh or distrib-env.sh file with the given environment in
    * the recommended format.
+   * different formats based on overrideValue flag
+   *
+   * @param file - File instance to set environment variables in
+   * @param env - Environment to be placed inside File
+   * @param overrideValue - true - Set environment value such that it overrides previously set value
+   *                      - false - Set environment value in recommended format.
    */
-
-  public void createEnvFile(File file, Map<String, String> env)
+  public void createEnvFile(File file, Map<String, String> env, boolean overrideValue)
       throws IOException {
     try (PrintWriter out = new PrintWriter(new FileWriter(file))) {
       out.println("#!/usr/bin/env bash");
       for (String key : env.keySet()) {
         String value = env.get(key);
-        out.print("export ");
-        out.print(key);
-        out.print("=${");
-        out.print(key);
-        out.print(":-\"");
-        out.print(value);
-        out.println("\"}");
+        writeEnvFile(out, key, value, overrideValue);
+      }
+    }
+  }
+
+  /**
+   * Creates a drill-env.sh or distrib-env.sh file with the given environment under
+   * a given condition. If size of env map is smaller than condition map then last
+   * env entry is repeated for rest of conditions.
+   *
+   * @param file - File instance to set environment and condition in
+   * @param condition - Conditions to guard environment variable
+   * @param env - Environment to be placed inside File
+   * @param overrideValue - true - Set environment value such that it overrides previously set value
+   *                      - false - Set environment value in recommended format.
+   *
+   */
+  public void createEnvFileWithCondition(File file, Map<String, String> condition,
+                                         Map<String, String> env, boolean overrideValue) throws IOException {
+    if (env.size() == 0 || condition.size() == 0) {
+      return;
+    }
+
+    final Iterator envIterator = env.entrySet().iterator();
+    Map.Entry currentEnv = (Map.Entry) envIterator.next();
+
+    try (PrintWriter out = new PrintWriter(new FileWriter(file))) {
+      out.println("#!/usr/bin/env bash");
+
+      for (String condKey : condition.keySet()) {
+        String condValue = condition.get(condKey);
+        out.print("if [ \"$");
+        out.print(condKey);
+        out.print("\" = \"");
+        out.print(condValue);
+        out.println("\" ]; then");
+
+        final String envKey = currentEnv.getKey().toString();
+        final String envValue = currentEnv.getValue().toString();
+        writeEnvFile(out, envKey, envValue, overrideValue);
+
+        out.println("fi");
+        out.println();
+
+        if (envIterator.hasNext()) {
+          currentEnv = (Map.Entry) envIterator.next();
+        }
       }
     }
   }
@@ -342,7 +407,8 @@ public class ScriptUtils {
    * Consume the input from a stream, specifically the stderr or stdout stream
    * from a process.
    *
-   * @see http://stackoverflow.com/questions/14165517/processbuilder-forwarding-stdout-and-stderr-of-started-processes-without-blocki
+   * @link http://stackoverflow.com/questions/14165517/processbuilder-forwarding-stdout-and-stderr-of-started-processes
+   * -without-blocki
    */
 
   private static class StreamGobbler extends Thread {
diff --git a/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/TestScripts.java b/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/TestScripts.java
index 5f6b5bb..38279f8 100644
--- a/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/TestScripts.java
+++ b/drill-yarn/src/test/java/org/apache/drill/yarn/scripts/TestScripts.java
@@ -159,14 +159,6 @@ public class TestScripts {
       result.validateArgRegex("-Xloggc:.*/" + logTail);
     }
 
-    // Max Perm Size
-
-    {
-      RunResult result = new DrillbitRun(DrillbitRun.DRILLBIT_RUN)
-          .addEnv("DRILLBIT_MAX_PERM", "600M").run();
-      result.validateArg("-XX:MaxPermSize=600M");
-    }
-
     // Code cache size
 
     {
@@ -346,9 +338,8 @@ public class TestScripts {
     drillEnv.put("DRILL_HEAP", "5G");
     drillEnv.put("DRILL_MAX_DIRECT_MEMORY", "7G");
     drillEnv.put("SERVER_LOG_GC", "1");
-    drillEnv.put("DRILLBIT_MAX_PERM", "600M");
     drillEnv.put("DRILLBIT_CODE_CACHE_SIZE", "2G");
-    context.createEnvFile(new File(siteDir, fileName), drillEnv);
+    context.createEnvFile(new File(siteDir, fileName), drillEnv, false);
 
     {
       RunResult result = new DrillbitRun(DrillbitRun.DRILLBIT_RUN).run();
@@ -358,8 +349,7 @@ public class TestScripts {
           propArg,
           "-Xms5G", "-Xmx5G",
           "-XX:MaxDirectMemorySize=7G",
-          "-XX:ReservedCodeCacheSize=2G",
-          "-XX:MaxPermSize=600M"
+          "-XX:ReservedCodeCacheSize=2G"
       };
 
       result.validateArgs(expectedArgs);
@@ -378,12 +368,50 @@ public class TestScripts {
           .run();
       assertEquals(0, result.returnCode);
       result.validateArg("-XX:MaxDirectMemorySize=9G");
-      result.validateArg("-XX:MaxPermSize=600M");
       String logTail = context.testDrillHome.getName() + "/log/drillbit.gc";
       assertFalse(result.containsArgRegex("-Xloggc:.*/" + logTail));
     }
   }
 
+  @Test
+  public void testDistribEnvWithNegativeCond() throws IOException {
+    // Construct condition map
+    final Map<String, String> conditions = new HashMap<>();
+    conditions.put("DRILLBIT_CONTEXT", "0");
+    final String expectedArgs[] = {"-XX:ReservedCodeCacheSize=1G"};
+    doEnvFileWithConditionTest("distrib-env.sh", conditions, expectedArgs);
+  }
+
+  @Test
+  public void testDistribEnvWithPositiveCond() throws IOException {
+    // Construct condition map
+    final Map<String, String> conditions = new HashMap<>();
+    conditions.put("DRILLBIT_CONTEXT", "1");
+    final String expectedArgs[] = {"-XX:ReservedCodeCacheSize=2G"};
+    doEnvFileWithConditionTest("distrib-env.sh", conditions, expectedArgs);
+  }
+
+  /**
+   * Implementation of the drill-env.sh or distrib-env.sh tests with conditions
+   * guarding environment variables.
+   */
+  private void doEnvFileWithConditionTest(String fileName, Map<String, String> conditions,
+                                          String[] expectedArgs) throws IOException {
+    context.createMockDistrib();
+    File siteDir = new File(context.testDrillHome, "conf");
+    context.createMockConf(siteDir);
+
+    // Set a property in the env file.
+    Map<String, String> drillEnv = new HashMap<>();
+    drillEnv.put("DRILLBIT_CODE_CACHE_SIZE", "2G");
+    context.createEnvFileWithCondition(new File(siteDir, fileName), conditions, drillEnv, false);
+    {
+      RunResult result = new DrillbitRun(DrillbitRun.DRILLBIT_RUN).run();
+      assertEquals(0, result.returnCode);
+      result.validateArgs(expectedArgs);
+    }
+  }
+
   /**
    * Test that drill-env.sh overrides distrib-env.sh, and that the environment
    * overrides both. Assumes the basics were tested above.
@@ -400,13 +428,12 @@ public class TestScripts {
     Map<String, String> distribEnv = new HashMap<>();
     distribEnv.put("DRILL_HEAP", "5G");
     distribEnv.put("DRILL_MAX_DIRECT_MEMORY", "7G");
-    distribEnv.put("DRILLBIT_MAX_PERM", "600M");
-    context.createEnvFile(new File(siteDir, "distrib-env.sh"), distribEnv);
+    context.createEnvFile(new File(siteDir, "distrib-env.sh"), distribEnv, false);
 
     Map<String, String> drillEnv = new HashMap<>();
     drillEnv.put("DRILL_HEAP", "6G");
     drillEnv.put("DRILL_MAX_DIRECT_MEMORY", "9G");
-    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv);
+    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv, false);
 
     {
       RunResult result = new DrillbitRun(DrillbitRun.DRILLBIT_RUN).run();
@@ -414,8 +441,7 @@ public class TestScripts {
       String expectedArgs[] = {
           "-Xms6G", "-Xmx6G",
           "-XX:MaxDirectMemorySize=9G",
-          "-XX:MaxPermSize=600M",
-          "-XX:ReservedCodeCacheSize=1G" // Default
+          "-XX:ReservedCodeCacheSize=1024m" // Default
       };
 
       result.validateArgs(expectedArgs);
@@ -428,8 +454,7 @@ public class TestScripts {
       String expectedArgs[] = {
           "-Xms6G", "-Xmx6G",
           "-XX:MaxDirectMemorySize=5G",
-          "-XX:MaxPermSize=600M",
-          "-XX:ReservedCodeCacheSize=1G" // Default
+          "-XX:ReservedCodeCacheSize=1024m" // Default
       };
 
       result.validateArgs(expectedArgs);
@@ -498,19 +523,17 @@ public class TestScripts {
     Map<String, String> distribEnv = new HashMap<>();
     distribEnv.put("DRILL_HEAP", "5G");
     distribEnv.put("DRILL_MAX_DIRECT_MEMORY", "7G");
-    distribEnv.put("DRILLBIT_MAX_PERM", "600M");
-    context.createEnvFile(new File(confDir, "distrib-env.sh"), distribEnv);
+    context.createEnvFile(new File(confDir, "distrib-env.sh"), distribEnv, false);
 
     Map<String, String> drillEnv = new HashMap<>();
     drillEnv.put("DRILL_HEAP", "6G");
     drillEnv.put("DRILL_MAX_DIRECT_MEMORY", "9G");
-    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv);
+    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv, false);
 
     String expectedArgs[] = {
         "-Xms6G", "-Xmx6G",
         "-XX:MaxDirectMemorySize=9G",
-        "-XX:MaxPermSize=600M",
-        "-XX:ReservedCodeCacheSize=1G" // Default
+        "-XX:ReservedCodeCacheSize=1024m" // Default
     };
 
     // Site set using argument
@@ -611,8 +634,7 @@ public class TestScripts {
 
     String prefix = "-Djava.library.path=";
     {
-      RunResult result = new DrillbitRun(DrillbitRun.DRILLBIT_RUN)
-          .run();
+      RunResult result = new DrillbitRun(DrillbitRun.DRILLBIT_RUN).run();
       assertFalse(result.containsArgRegex(prefix + ".*"));
       assertNull(result.libPath);
     }
@@ -874,7 +896,7 @@ public class TestScripts {
     File pidDir = context.createDir(new File(context.testDir, "pid"));
     Map<String, String> drillEnv = new HashMap<>();
     drillEnv.put("DRILL_PID_DIR", pidDir.getAbsolutePath());
-    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv);
+    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv, false);
 
     {
       RunResult result = new DrillbitRun(DrillbitRun.DRILLBIT_START)
@@ -905,7 +927,7 @@ public class TestScripts {
 
     Map<String, String> drillEnv = new HashMap<>();
     drillEnv.put("DRILL_MAX_DIRECT_MEMORY", "9G");
-    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv);
+    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv, false);
 
     // Use the -site (--config) option.
 
@@ -948,7 +970,7 @@ public class TestScripts {
     context.removeDir(new File(context.testDrillHome, "log"));
     Map<String, String> drillEnv = new HashMap<>();
     drillEnv.put("DRILL_LOG_DIR", logsDir.getAbsolutePath());
-    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv);
+    context.createEnvFile(new File(siteDir, "drill-env.sh"), drillEnv, false);
 
     {
       DrillbitRun runner = new DrillbitRun(DrillbitRun.DRILLBIT_START);
@@ -1122,7 +1144,6 @@ public class TestScripts {
       drillEnv.put("DRILL_HEAP", "5G");
       drillEnv.put("DRILL_MAX_DIRECT_MEMORY", "7G");
       drillEnv.put("SERVER_LOG_GC", "1");
-      drillEnv.put("DRILLBIT_MAX_PERM", "600M");
       drillEnv.put("DRILLBIT_CODE_CACHE_SIZE", "2G");
       RunResult result = new ScriptRunner("sqlline")
           .withEnvironment(drillEnv)
@@ -1138,11 +1159,9 @@ public class TestScripts {
 
       Map<String, String> shellEnv = new HashMap<>();
       shellEnv.put("CLIENT_GC_OPTS", "-XX:+UseG1GC");
-      shellEnv.put("SQLLINE_JAVA_OPTS", "-XX:MaxPermSize=256M");
       RunResult result = new ScriptRunner("sqlline")
           .withEnvironment(shellEnv)
           .run();
-      assertTrue(result.containsArg("-XX:MaxPermSize=256M"));
       assertTrue(result.containsArg("-XX:+UseG1GC"));
     }
     {
@@ -1156,7 +1175,6 @@ public class TestScripts {
       drillEnv.put("DRILL_HEAP", "5G");
       drillEnv.put("DRILL_MAX_DIRECT_MEMORY", "7G");
       drillEnv.put("SERVER_LOG_GC", "1");
-      drillEnv.put("DRILLBIT_MAX_PERM", "600M");
       drillEnv.put("DRILLBIT_CODE_CACHE_SIZE", "2G");
       drillEnv.put("DRILL_EMBEDDED", "1");
       RunResult result = new ScriptRunner("sqlline")
@@ -1168,7 +1186,6 @@ public class TestScripts {
           "-Xms5G", "-Xmx5G",
           "-XX:MaxDirectMemorySize=7G",
           "-XX:ReservedCodeCacheSize=2G",
-          "-XX:MaxPermSize=600M"
       };
 
       result.validateArgs(expectedArgs);
@@ -1177,6 +1194,80 @@ public class TestScripts {
   }
 
   /**
+   * Test to verify no effect of DRILLBIT_CONTEXT for Sqlline.
+   * @throws IOException
+   */
+  @Test
+  public void testSqllineWithDrillbitContextEnv() throws IOException {
+    context.createMockDistrib();
+    File siteDir = new File(context.testDrillHome, "conf");
+    context.createMockConf(siteDir);
+
+    // Test when SQLLINE_JAVA_OPTS is overridden inside a condition for
+    // DRILLBIT_CONTEXT = 0, then there is no effect
+    {
+      // Create a condition variable to be placed in distrib-env.sh
+      Map<String, String> conditions = new HashMap<>();
+      conditions.put("DRILLBIT_CONTEXT", "0");
+
+      // Create environment variable to be placed inside a condition in distrib-env.sh
+      Map<String, String> drillEnv = new HashMap<>();
+      drillEnv.put("SQLLINE_JAVA_OPTS", "-XX:MaxPermSize=256M");
+
+      // Create the environment variable file overriding SQLLINE_JAVA_OPTS
+      context.createEnvFileWithCondition(new File(siteDir, "distrib-env.sh"), conditions, drillEnv, true);
+
+      // Expected value of the property
+      String expectedArgs[] = {"-XX:MaxPermSize=256M"};
+
+      // Run the test and match the output with expectedArgs
+      RunResult result = new ScriptRunner("sqlline").run();
+      assertEquals(0, result.returnCode);
+      result.validateJava();
+      result.validateClassPath(ScriptUtils.stdCp);
+      // Since MaxPermSize is no longer set by default for Sqlline (it was removed in 1.13)
+      assertFalse(result.containsArgsRegex(expectedArgs));
+    }
+
+    // Test when SQLLINE_JAVA_OPTS is overridden inside a condition for
+    // DRILLBIT_CONTEXT = 1, then there is no effect
+    {
+      Map<String, String> conditions = new HashMap<>();
+      conditions.put("DRILLBIT_CONTEXT", "1");
+
+      Map<String, String> drillEnv = new HashMap<>();
+      drillEnv.put("SQLLINE_JAVA_OPTS", "-XX:MaxPermSize=256M");
+      String expectedArgs[] = {"-XX:MaxPermSize=256M"};
+
+      // Create the environment variable file overriding SQLLINE_JAVA_OPTS
+      context.createEnvFileWithCondition(new File(siteDir, "distrib-env.sh"), conditions, drillEnv, true);
+      RunResult result = new ScriptRunner("sqlline").run();
+      assertEquals(0, result.returnCode);
+      result.validateJava();
+      result.validateClassPath(ScriptUtils.stdCp);
+      // Since MaxPermSize is no longer set by default for Sqlline (it was removed in 1.13)
+      assertFalse(result.containsArgsRegex(expectedArgs));
+    }
+
+    // Test when SQLLINE_JAVA_OPTS is overridden without a condition for
+    // DRILLBIT_CONTEXT then the environment variable is updated
+    {
+      Map<String, String> drillEnv = new HashMap<>();
+      drillEnv.put("SQLLINE_JAVA_OPTS", "-XX:MaxPermSize=256M");
+
+      // Create the environment variable file overriding SQLLINE_JAVA_OPTS without any condition
+      // around it.
+      String expectedArgs[] = {"-XX:MaxPermSize=256M"};
+      context.createEnvFile(new File(siteDir, "distrib-env.sh"), drillEnv, true);
+      RunResult result = new ScriptRunner("sqlline").run();
+      assertEquals(0, result.returnCode);
+      result.validateJava();
+      result.validateClassPath(ScriptUtils.stdCp);
+      assertTrue(result.containsArgsRegex(expectedArgs));
+    }
+  }
+
+  /**
    * Verify that the sqlline client works with the --site option by customizing
    * items in the site directory.
    *

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 04/10: DRILL-6236:Batch sizing for hash join

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 480ade960767cf138de184c7873792e96b0e9a9f
Author: Padma Penumarthy <pp...@yahoo.com>
AuthorDate: Wed May 30 14:00:16 2018 -0700

    DRILL-6236:Batch sizing for hash join
    
    This closes #1227
---
 .../exec/physical/impl/join/HashJoinBatch.java     | 125 ++++---
 .../join/HashJoinMechanicalMemoryCalculator.java   |   1 +
 .../impl/join/HashJoinMemoryCalculator.java        |   1 +
 .../impl/join/HashJoinMemoryCalculatorImpl.java    |  35 +-
 .../exec/physical/impl/join/HashJoinProbe.java     |   2 +
 .../physical/impl/join/HashJoinProbeTemplate.java  |  22 +-
 .../exec/record/AbstractBinaryRecordBatch.java     |   4 +
 .../drill/exec/record/JoinBatchMemoryManager.java  |  61 ++--
 .../exec/record/RecordBatchMemoryManager.java      |  26 +-
 .../apache/drill/exec/record/RecordBatchSizer.java |  49 ++-
 .../impl/join/TestBuildSidePartitioningImpl.java   |  20 +-
 .../exec/physical/unit/TestOutputBatchSize.java    | 386 +++++++++++++++++++++
 12 files changed, 603 insertions(+), 129 deletions(-)

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
index ee7a8a3..4267077 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
@@ -57,16 +57,19 @@ import org.apache.drill.exec.physical.impl.common.HashPartition;
 import org.apache.drill.exec.physical.impl.spill.SpillSet;
 import org.apache.drill.exec.record.AbstractBinaryRecordBatch;
 import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.JoinBatchMemoryManager;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.record.RecordBatchSizer;
 import org.apache.drill.exec.record.VectorWrapper;
-import org.apache.drill.exec.vector.FixedWidthVector;
 import org.apache.drill.exec.vector.IntVector;
 import org.apache.drill.exec.vector.ValueVector;
-import org.apache.drill.exec.vector.VariableWidthVector;
 import org.apache.drill.exec.vector.complex.AbstractContainerVector;
 import org.apache.calcite.rel.core.JoinRelType;
 
+import static org.apache.drill.exec.record.JoinBatchMemoryManager.LEFT_INDEX;
+import static org.apache.drill.exec.record.JoinBatchMemoryManager.RIGHT_INDEX;
+
 /**
  *   This class implements the runtime execution for the Hash-Join operator
  *   supporting INNER, LEFT OUTER, RIGHT OUTER, and FULL OUTER joins
@@ -95,11 +98,6 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
    */
   private int RECORDS_PER_BATCH; // internal batches
 
-  /**
-   * The maximum number of records in each outgoing batch.
-   */
-  private static final int TARGET_RECORDS_PER_BATCH = 4000;
-
   // Join type, INNER, LEFT, RIGHT or OUTER
   private final JoinRelType joinType;
 
@@ -172,7 +170,8 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
     public String outerSpillFile;
     int cycleNum;
     int origPartn;
-    int prevOrigPartn; }
+    int prevOrigPartn;
+  }
 
   /**
    * Queue of spilled partitions to process.
@@ -181,7 +180,6 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
   private HJSpilledPartition spilledInners[]; // for the outer to find the partition
 
   public enum Metric implements MetricDef {
-
     NUM_BUCKETS,
     NUM_ENTRIES,
     NUM_RESIZING,
@@ -190,8 +188,19 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
     SPILLED_PARTITIONS, // number of original partitions spilled to disk
     SPILL_MB,         // Number of MB of data spilled to disk. This amount is first written,
                       // then later re-read. So, disk I/O is twice this amount.
-    SPILL_CYCLE       // 0 - no spill, 1 - spill, 2 - SECONDARY, 3 - TERTIARY
-    ;
+    SPILL_CYCLE,       // 0 - no spill, 1 - spill, 2 - SECONDARY, 3 - TERTIARY
+    LEFT_INPUT_BATCH_COUNT,
+    LEFT_AVG_INPUT_BATCH_BYTES,
+    LEFT_AVG_INPUT_ROW_BYTES,
+    LEFT_INPUT_RECORD_COUNT,
+    RIGHT_INPUT_BATCH_COUNT,
+    RIGHT_AVG_INPUT_BATCH_BYTES,
+    RIGHT_AVG_INPUT_ROW_BYTES,
+    RIGHT_INPUT_RECORD_COUNT,
+    OUTPUT_BATCH_COUNT,
+    AVG_OUTPUT_BATCH_BYTES,
+    AVG_OUTPUT_ROW_BYTES,
+    OUTPUT_RECORD_COUNT;
 
     // duplicate for hash ag
 
@@ -221,12 +230,7 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
       throw new SchemaChangeException(e);
     }
 
-    // Build the container schema and set the counts
-    for (final VectorWrapper<?> w : container) {
-      w.getValueVector().allocateNew();
-    }
     container.buildSchema(BatchSchema.SelectionVectorMode.NONE);
-    container.setRecordCount(outputRecords);
   }
 
   @Override
@@ -234,6 +238,15 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
     leftUpstream = sniffNonEmptyBatch(0, left);
     rightUpstream = sniffNonEmptyBatch(1, right);
 
+    // For build side, use aggregate i.e. average row width across batches
+    batchMemoryManager.update(LEFT_INDEX, 0);
+    batchMemoryManager.update(RIGHT_INDEX, 0, true);
+
+    if (logger.isDebugEnabled()) {
+      logger.debug("BATCH_STATS, incoming left:\n {}", batchMemoryManager.getRecordBatchSizer(LEFT_INDEX));
+      logger.debug("BATCH_STATS, incoming right:\n {}", batchMemoryManager.getRecordBatchSizer(RIGHT_INDEX));
+    }
+
     if (leftUpstream == IterOutcome.STOP || rightUpstream == IterOutcome.STOP) {
       state = BatchState.STOP;
       return false;
@@ -333,10 +346,21 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
            joinType != JoinRelType.INNER) {  // or if this is a left/full outer join
 
         // Allocate the memory for the vectors in the output container
-        allocateVectors();
+        batchMemoryManager.allocateVectors(container);
+        hashJoinProbe.setTargetOutputCount(batchMemoryManager.getOutputRowCount());
 
         outputRecords = hashJoinProbe.probeAndProject();
 
+        for (final VectorWrapper<?> v : container) {
+          v.getValueVector().getMutator().setValueCount(outputRecords);
+        }
+        container.setRecordCount(outputRecords);
+
+        batchMemoryManager.updateOutgoingStats(outputRecords);
+        if (logger.isDebugEnabled()) {
+          logger.debug("BATCH_STATS, outgoing:\n {}", new RecordBatchSizer(this));
+        }
+
         /* We are here because of one the following
          * 1. Completed processing of all the records and we are done
          * 2. We've filled up the outgoing batch to the maximum and we need to return upstream
@@ -347,10 +371,6 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
             state = BatchState.NOT_FIRST;
           }
 
-          for (final VectorWrapper<?> v : container) {
-            v.getValueVector().getMutator().setValueCount(outputRecords);
-          }
-
           return IterOutcome.OK;
         }
 
@@ -557,7 +577,8 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
         RECORDS_PER_BATCH,
         maxBatchSize,
         maxBatchSize,
-        TARGET_RECORDS_PER_BATCH,
+        batchMemoryManager.getOutputRowCount(),
+        batchMemoryManager.getOutputBatchSize(),
         HashTable.DEFAULT_LOAD_FACTOR);
 
       disableSpilling(null);
@@ -628,7 +649,8 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
         RECORDS_PER_BATCH,
         maxBatchSize,
         maxBatchSize,
-        TARGET_RECORDS_PER_BATCH,
+        batchMemoryManager.getOutputRowCount(),
+        batchMemoryManager.getOutputBatchSize(),
         HashTable.DEFAULT_LOAD_FACTOR);
 
       if (firstCycle && doMemoryCalculation) {
@@ -665,6 +687,7 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
         for (HashPartition partn : partitions) { partn.updateBatches(); }
         // Fall through
       case OK:
+        batchMemoryManager.update(buildBatch, RIGHT_INDEX, 0, true);
         // Special treatment (when no spill, and single partition) -- use the incoming vectors as they are (no row copy)
         if ( numPartitions == 1 ) {
           partitions[0].appendBatch(buildBatch);
@@ -803,22 +826,6 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
 
   }
 
-  private void allocateVectors() {
-    for (final VectorWrapper<?> vectorWrapper : container) {
-      ValueVector valueVector = vectorWrapper.getValueVector();
-
-      if (valueVector instanceof FixedWidthVector) {
-        ((FixedWidthVector) valueVector).allocateNew(TARGET_RECORDS_PER_BATCH);
-      } else if (valueVector instanceof VariableWidthVector) {
-        ((VariableWidthVector) valueVector).allocateNew(8 * TARGET_RECORDS_PER_BATCH, TARGET_RECORDS_PER_BATCH);
-      } else {
-        valueVector.allocateNew();
-      }
-    }
-
-    container.setRecordCount(0); // reset container's counter back to zero records
-  }
-
   // (After the inner side was read whole) - Has that inner partition spilled
   public boolean isSpilledInner(int part) {
     if ( spilledInners == null ) { return false; } // empty inner
@@ -879,6 +886,10 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
 
     // Create empty partitions (in the ctor - covers the case where right side is empty)
     partitions = new HashPartition[0];
+
+    // get the output batch size from config.
+    int configuredBatchSize = (int) context.getOptions().getOption(ExecConstants.OUTPUT_BATCH_SIZE_VALIDATOR);
+    batchMemoryManager = new JoinBatchMemoryManager(configuredBatchSize, left, right);
   }
 
   /**
@@ -966,6 +977,23 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
     buildBatch.kill(sendUpstream);
   }
 
+  public void updateMetrics() {
+    stats.setLongStat(HashJoinBatch.Metric.LEFT_INPUT_BATCH_COUNT, batchMemoryManager.getNumIncomingBatches(LEFT_INDEX));
+    stats.setLongStat(HashJoinBatch.Metric.LEFT_AVG_INPUT_BATCH_BYTES, batchMemoryManager.getAvgInputBatchSize(LEFT_INDEX));
+    stats.setLongStat(HashJoinBatch.Metric.LEFT_AVG_INPUT_ROW_BYTES, batchMemoryManager.getAvgInputRowWidth(LEFT_INDEX));
+    stats.setLongStat(HashJoinBatch.Metric.LEFT_INPUT_RECORD_COUNT, batchMemoryManager.getTotalInputRecords(LEFT_INDEX));
+
+    stats.setLongStat(HashJoinBatch.Metric.RIGHT_INPUT_BATCH_COUNT, batchMemoryManager.getNumIncomingBatches(RIGHT_INDEX));
+    stats.setLongStat(HashJoinBatch.Metric.RIGHT_AVG_INPUT_BATCH_BYTES, batchMemoryManager.getAvgInputBatchSize(RIGHT_INDEX));
+    stats.setLongStat(HashJoinBatch.Metric.RIGHT_AVG_INPUT_ROW_BYTES, batchMemoryManager.getAvgInputRowWidth(RIGHT_INDEX));
+    stats.setLongStat(HashJoinBatch.Metric.RIGHT_INPUT_RECORD_COUNT, batchMemoryManager.getTotalInputRecords(RIGHT_INDEX));
+
+    stats.setLongStat(HashJoinBatch.Metric.OUTPUT_BATCH_COUNT, batchMemoryManager.getNumOutgoingBatches());
+    stats.setLongStat(HashJoinBatch.Metric.AVG_OUTPUT_BATCH_BYTES, batchMemoryManager.getAvgOutputBatchSize());
+    stats.setLongStat(HashJoinBatch.Metric.AVG_OUTPUT_ROW_BYTES, batchMemoryManager.getAvgOutputRowWidth());
+    stats.setLongStat(HashJoinBatch.Metric.OUTPUT_RECORD_COUNT, batchMemoryManager.getTotalOutputRecords());
+  }
+
   @Override
   public void close() {
     if ( cycleNum > 0 ) { // spilling happened
@@ -973,6 +1001,25 @@ public class HashJoinBatch extends AbstractBinaryRecordBatch<HashJoinPOP> {
       // SpilledRecordBatch "scanners" as it only knows about the original left/right ops.
       killIncoming(false);
     }
+
+    updateMetrics();
+
+    logger.debug("BATCH_STATS, incoming aggregate left: batch count : {}, avg bytes : {},  avg row bytes : {}, record count : {}",
+      batchMemoryManager.getNumIncomingBatches(JoinBatchMemoryManager.LEFT_INDEX),
+      batchMemoryManager.getAvgInputBatchSize(JoinBatchMemoryManager.LEFT_INDEX),
+      batchMemoryManager.getAvgInputRowWidth(JoinBatchMemoryManager.LEFT_INDEX),
+      batchMemoryManager.getTotalInputRecords(JoinBatchMemoryManager.LEFT_INDEX));
+
+    logger.debug("BATCH_STATS, incoming aggregate right: batch count : {}, avg bytes : {},  avg row bytes : {}, record count : {}",
+      batchMemoryManager.getNumIncomingBatches(JoinBatchMemoryManager.RIGHT_INDEX),
+      batchMemoryManager.getAvgInputBatchSize(JoinBatchMemoryManager.RIGHT_INDEX),
+      batchMemoryManager.getAvgInputRowWidth(JoinBatchMemoryManager.RIGHT_INDEX),
+      batchMemoryManager.getTotalInputRecords(JoinBatchMemoryManager.RIGHT_INDEX));
+
+    logger.debug("BATCH_STATS, outgoing aggregate: batch count : {}, avg bytes : {},  avg row bytes : {}, record count : {}",
+      batchMemoryManager.getNumOutgoingBatches(), batchMemoryManager.getAvgOutputBatchSize(),
+      batchMemoryManager.getAvgOutputRowWidth(), batchMemoryManager.getTotalOutputRecords());
+
     this.cleanup();
     super.close();
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMechanicalMemoryCalculator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMechanicalMemoryCalculator.java
index 618e80e..fb087a0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMechanicalMemoryCalculator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMechanicalMemoryCalculator.java
@@ -77,6 +77,7 @@ public class HashJoinMechanicalMemoryCalculator implements HashJoinMemoryCalcula
                            int maxBatchNumRecordsBuild,
                            int maxBatchNumRecordsProbe,
                            int outputBatchNumRecords,
+                           int outputBatchSize,
                            double loadFactor) {
       this.initialPartitions = initialPartitions;
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMemoryCalculator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMemoryCalculator.java
index 71292a5..868fbfd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMemoryCalculator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMemoryCalculator.java
@@ -100,6 +100,7 @@ public interface HashJoinMemoryCalculator extends HashJoinStateCalculator<HashJo
                     int maxBatchNumRecordsBuild,
                     int maxBatchNumRecordsProbe,
                     int outputBatchNumRecords,
+                    int outputBatchSize,
                     double loadFactor);
 
     void setPartitionStatSet(PartitionStatSet partitionStatSet);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMemoryCalculatorImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMemoryCalculatorImpl.java
index ed0adc5..37f3329 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMemoryCalculatorImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinMemoryCalculatorImpl.java
@@ -142,6 +142,7 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
                            int maxBatchNumRecordsBuild,
                            int maxBatchNumRecordsProbe,
                            int outputBatchNumRecords,
+                           int outputBatchSize,
                            double loadFactor) {
       this.initialPartitions = initialPartitions;
     }
@@ -203,7 +204,7 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
    * <h1>Life Cycle</h1>
    * <p>
    *   <ul>
-   *     <li><b>Step 0:</b> Call {@link #initialize(boolean, boolean, RecordBatch, RecordBatch, Set, long, int, int, int, int, int, int, double)}.
+   *     <li><b>Step 0:</b> Call {@link #initialize(boolean, boolean, RecordBatch, RecordBatch, Set, long, int, int, int, int, int, int, int, double)}.
    *     This will initialize the StateCalculate with the additional information it needs.</li>
    *     <li><b>Step 1:</b> Call {@link #getNumPartitions()} to see the number of partitions that fit in memory.</li>
    *     <li><b>Step 2:</b> Call {@link #shouldSpill()} To determine if spilling needs to occurr.</li>
@@ -233,9 +234,7 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
     private int partitions;
     private int recordsPerPartitionBatchBuild;
     private int recordsPerPartitionBatchProbe;
-    private int outputBatchNumRecords;
-    private Map<String, Long> buildValueSizes;
-    private Map<String, Long> probeValueSizes;
+    private int outputBatchSize;
     private Map<String, Long> keySizes;
     private boolean autoTune;
     private boolean reserveHash;
@@ -273,6 +272,7 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
                            int maxBatchNumRecordsBuild,
                            int maxBatchNumRecordsProbe,
                            int outputBatchNumRecords,
+                           int outputBatchSize,
                            double loadFactor) {
       Preconditions.checkNotNull(buildSideBatch);
       Preconditions.checkNotNull(probeSideBatch);
@@ -300,8 +300,6 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
 
       initialize(autoTune,
         reserveHash,
-        buildValueSizes,
-        probeValueSizes,
         keySizes,
         memoryAvailable,
         initialPartitions,
@@ -313,7 +311,7 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
         recordsPerPartitionBatchProbe,
         maxBatchNumRecordsBuild,
         maxBatchNumRecordsProbe,
-        outputBatchNumRecords,
+        outputBatchSize,
         loadFactor);
     }
 
@@ -352,8 +350,6 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
     @VisibleForTesting
     protected void initialize(boolean autoTune,
                               boolean reserveHash,
-                              CaseInsensitiveMap<Long> buildValueSizes,
-                              CaseInsensitiveMap<Long> probeValueSizes,
                               CaseInsensitiveMap<Long> keySizes,
                               long memoryAvailable,
                               int initialPartitions,
@@ -365,7 +361,7 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
                               int recordsPerPartitionBatchProbe,
                               int maxBatchNumRecordsBuild,
                               int maxBatchNumRecordsProbe,
-                              int outputBatchNumRecords,
+                              int outputBatchSize,
                               double loadFactor) {
       Preconditions.checkState(!firstInitialized);
       Preconditions.checkArgument(initialPartitions >= 1);
@@ -374,8 +370,6 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
       this.loadFactor = loadFactor;
       this.autoTune = autoTune;
       this.reserveHash = reserveHash;
-      this.buildValueSizes = Preconditions.checkNotNull(buildValueSizes);
-      this.probeValueSizes = Preconditions.checkNotNull(probeValueSizes);
       this.keySizes = Preconditions.checkNotNull(keySizes);
       this.memoryAvailable = memoryAvailable;
       this.buildBatchSize = buildBatchSize;
@@ -387,7 +381,7 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
       this.recordsPerPartitionBatchProbe = recordsPerPartitionBatchProbe;
       this.maxBatchNumRecordsBuild = maxBatchNumRecordsBuild;
       this.maxBatchNumRecordsProbe = maxBatchNumRecordsProbe;
-      this.outputBatchNumRecords = outputBatchNumRecords;
+      this.outputBatchSize = outputBatchSize;
 
       calculateMemoryUsage();
 
@@ -448,8 +442,7 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
         safetyFactor,
         reserveHash);
 
-      maxOutputBatchSize = computeMaxOutputBatchSize(buildValueSizes, probeValueSizes, keySizes,
-        outputBatchNumRecords, safetyFactor, fragmentationFactor);
+      maxOutputBatchSize = (long) ((double)outputBatchSize * fragmentationFactor * safetyFactor);
 
       long probeReservedMemory;
 
@@ -519,18 +512,6 @@ public class HashJoinMemoryCalculatorImpl implements HashJoinMemoryCalculator {
       }
     }
 
-    public static long computeMaxOutputBatchSize(Map<String, Long> buildValueSizes,
-                                                 Map<String, Long> probeValueSizes,
-                                                 Map<String, Long> keySizes,
-                                                 int outputNumRecords,
-                                                 double safetyFactor,
-                                                 double fragmentationFactor) {
-      long outputSize = HashTableSizeCalculatorConservativeImpl.computeVectorSizes(keySizes, outputNumRecords, safetyFactor)
-        + HashTableSizeCalculatorConservativeImpl.computeVectorSizes(buildValueSizes, outputNumRecords, safetyFactor)
-        + HashTableSizeCalculatorConservativeImpl.computeVectorSizes(probeValueSizes, outputNumRecords, safetyFactor);
-      return RecordBatchSizer.multiplyByFactor(outputSize, fragmentationFactor);
-    }
-
     @Override
     public boolean shouldSpill() {
       Preconditions.checkState(initialized);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbe.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbe.java
index f212605..5059b18 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbe.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbe.java
@@ -42,4 +42,6 @@ public interface HashJoinProbe {
   void setupHashJoinProbe(RecordBatch probeBatch, HashJoinBatch outgoing, JoinRelType joinRelType, RecordBatch.IterOutcome leftStartState, HashPartition[] partitions, int cycleNum, VectorContainer container, HashJoinBatch.HJSpilledPartition[] spilledInners, boolean buildSideIsEmpty, int numPartitions, int rightHVColPosition);
   int  probeAndProject() throws SchemaChangeException;
   void changeToFinalProbeState();
+  void setTargetOutputCount(int targetOutputCount);
+  int getOutputCount();
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
index 75c3073..46f2fa3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinProbeTemplate.java
@@ -31,6 +31,8 @@ import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.drill.exec.vector.IntVector;
 import org.apache.drill.exec.vector.ValueVector;
 
+import static org.apache.drill.exec.record.JoinBatchMemoryManager.LEFT_INDEX;
+
 public abstract class HashJoinProbeTemplate implements HashJoinProbe {
 
   VectorContainer container; // the outgoing container
@@ -45,8 +47,6 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
 
   private HashJoinBatch outgoingJoinBatch = null;
 
-  private static final int TARGET_RECORDS_PER_BATCH = 4000;
-
   // Number of records to process on the probe side
   private int recordsToProcess = 0;
 
@@ -83,6 +83,16 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
   private int partitionMask = 0; // numPartitions - 1
   private int bitsInMask = 0; // number of bits in the MASK
   private int rightHVColPosition;
+  private int targetOutputRecords;
+
+  @Override
+  public void setTargetOutputCount(int targetOutputRecords) {
+    this.targetOutputRecords = targetOutputRecords;
+  }
+
+  public int getOutputCount() {
+    return outputRecords;
+  }
 
   /**
    *  Setup the Hash Join Probe object
@@ -209,7 +219,7 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
 
 
   private void executeProjectRightPhase(int currBuildPart) {
-    while (outputRecords < TARGET_RECORDS_PER_BATCH && recordsProcessed < recordsToProcess) {
+    while (outputRecords < targetOutputRecords && recordsProcessed < recordsToProcess) {
       outputRecords =
         outputRow(partitions[currBuildPart].getContainers(), unmatchedBuildIndexes.get(recordsProcessed),
           null /* no probeBatch */, 0 /* no probe index */ );
@@ -219,7 +229,7 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
 
   private void executeProbePhase() throws SchemaChangeException {
 
-    while (outputRecords < TARGET_RECORDS_PER_BATCH && probeState != ProbeState.DONE && probeState != ProbeState.PROJECT_RIGHT) {
+    while (outputRecords < targetOutputRecords && probeState != ProbeState.DONE && probeState != ProbeState.PROJECT_RIGHT) {
 
       // Check if we have processed all records in this batch we need to invoke next
       if (recordsProcessed == recordsToProcess) {
@@ -262,6 +272,7 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
                 probeBatch.getSchema());
             }
           case OK:
+            setTargetOutputCount(outgoingJoinBatch.getBatchMemoryManager().update(probeBatch, LEFT_INDEX,outputRecords));
             recordsToProcess = probeBatch.getRecordCount();
             recordsProcessed = 0;
             // If we received an empty batch do nothing
@@ -274,10 +285,9 @@ public abstract class HashJoinProbeTemplate implements HashJoinProbe {
         }
       }
 
-        int probeIndex = -1;
+      int probeIndex = -1;
       // Check if we need to drain the next row in the probe side
       if (getNextRecord) {
-
         if ( !buildSideIsEmpty ) {
           int hashCode = ( cycleNum == 0 ) ?
             partitions[0].getProbeHashCode(recordsProcessed)
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractBinaryRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractBinaryRecordBatch.java
index 9052836..d75463b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractBinaryRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractBinaryRecordBatch.java
@@ -121,6 +121,10 @@ public abstract class AbstractBinaryRecordBatch<T extends PhysicalOperator> exte
     return (leftOutcome == IterOutcome.NONE && rightOutcome == IterOutcome.NONE);
   }
 
+  public RecordBatchMemoryManager getBatchMemoryManager() {
+    return batchMemoryManager;
+  }
+
   protected void updateBatchMemoryManagerStats() {
     stats.setLongStat(Metric.LEFT_INPUT_BATCH_COUNT, batchMemoryManager.getNumIncomingBatches(LEFT_INDEX));
     stats.setLongStat(Metric.LEFT_AVG_INPUT_BATCH_BYTES, batchMemoryManager.getAvgInputBatchSize(LEFT_INDEX));
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/JoinBatchMemoryManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/JoinBatchMemoryManager.java
index c147cf7..16b06fe 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/JoinBatchMemoryManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/JoinBatchMemoryManager.java
@@ -20,42 +20,26 @@ package org.apache.drill.exec.record;
 public class JoinBatchMemoryManager extends RecordBatchMemoryManager {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JoinBatchMemoryManager.class);
 
-  private int leftRowWidth;
-
-  private int rightRowWidth;
-
-  private RecordBatch leftIncoming;
-
-  private RecordBatch rightIncoming;
+  private int rowWidth[];
+  private RecordBatch recordBatch[];
 
   private static final int numInputs = 2;
-
   public static final int LEFT_INDEX = 0;
-
   public static final int RIGHT_INDEX = 1;
 
   public JoinBatchMemoryManager(int outputBatchSize, RecordBatch leftBatch, RecordBatch rightBatch) {
     super(numInputs, outputBatchSize);
-    this.leftIncoming = leftBatch;
-    this.rightIncoming = rightBatch;
+    recordBatch = new RecordBatch[numInputs];
+    recordBatch[LEFT_INDEX] = leftBatch;
+    recordBatch[RIGHT_INDEX] = rightBatch;
+    rowWidth = new int[numInputs];
   }
 
-  @Override
-  public int update(int inputIndex, int outputPosition) {
-    switch (inputIndex) {
-      case LEFT_INDEX:
-        setRecordBatchSizer(inputIndex, new RecordBatchSizer(leftIncoming));
-        leftRowWidth = getRecordBatchSizer(inputIndex).getRowAllocSize();
-        break;
-      case RIGHT_INDEX:
-        setRecordBatchSizer(inputIndex, new RecordBatchSizer(rightIncoming));
-        rightRowWidth = getRecordBatchSizer(inputIndex).getRowAllocSize();
-      default:
-        break;
-    }
-
+  private int updateInternal(int inputIndex, int outputPosition,  boolean useAggregate) {
     updateIncomingStats(inputIndex);
-    final int newOutgoingRowWidth = leftRowWidth + rightRowWidth;
+    rowWidth[inputIndex] = useAggregate ? (int) getAvgInputRowWidth(inputIndex) : getRecordBatchSizer(inputIndex).getRowAllocSize();
+
+    final int newOutgoingRowWidth = rowWidth[LEFT_INDEX] + rowWidth[RIGHT_INDEX];
 
     // If outgoing row width is 0, just return. This is possible for empty batches or
     // when first set of batches come with OK_NEW_SCHEMA and no data.
@@ -85,13 +69,24 @@ public class JoinBatchMemoryManager extends RecordBatchMemoryManager {
   }
 
   @Override
-  public RecordBatchSizer.ColumnSize getColumnSize(String name) {
-    RecordBatchSizer leftSizer = getRecordBatchSizer(LEFT_INDEX);
-    RecordBatchSizer rightSizer = getRecordBatchSizer(RIGHT_INDEX);
+  public int update(int inputIndex, int outputPosition, boolean useAggregate) {
+    setRecordBatchSizer(inputIndex, new RecordBatchSizer(recordBatch[inputIndex]));
+    return updateInternal(inputIndex, outputPosition, useAggregate);
+  }
 
-    if (leftSizer != null && leftSizer.getColumn(name) != null) {
-      return leftSizer.getColumn(name);
-    }
-    return rightSizer == null ? null : rightSizer.getColumn(name);
+  @Override
+  public int update(int inputIndex, int outputPosition) {
+    return update(inputIndex, outputPosition, false);
+  }
+
+  @Override
+  public int update(RecordBatch batch, int inputIndex, int outputPosition, boolean useAggregate) {
+    setRecordBatchSizer(inputIndex, new RecordBatchSizer(batch));
+    return updateInternal(inputIndex, outputPosition, useAggregate);
+  }
+
+  @Override
+  public int update(RecordBatch batch, int inputIndex, int outputPosition) {
+    return update(batch, inputIndex, outputPosition, false);
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchMemoryManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchMemoryManager.java
index 759e597..993f3cb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchMemoryManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchMemoryManager.java
@@ -163,6 +163,19 @@ public class RecordBatchMemoryManager {
     updateIncomingStats(index);
   }
 
+  public int update(int inputIndex, int outputPosition, boolean useAggregate) {
+    // by default just return the outputRowCount
+    return getOutputRowCount();
+  }
+
+  public int update(RecordBatch batch, int inputIndex, int outputPosition) {
+    return getOutputRowCount();
+  }
+
+  public int update(RecordBatch batch, int inputIndex, int outputPosition, boolean useAggregate) {
+    return getOutputRowCount();
+  }
+
   public int getOutputRowCount() {
     return outputRowCount;
   }
@@ -205,8 +218,7 @@ public class RecordBatchMemoryManager {
   }
 
   public void setRecordBatchSizer(RecordBatchSizer sizer) {
-    this.sizer[DEFAULT_INPUT_INDEX] = sizer;
-    inputBatchStats[DEFAULT_INPUT_INDEX] = new BatchStats();
+    setRecordBatchSizer(DEFAULT_INPUT_INDEX, sizer);
   }
 
   public RecordBatchSizer getRecordBatchSizer(int index) {
@@ -261,7 +273,6 @@ public class RecordBatchMemoryManager {
     return UInt4Vector.VALUE_WIDTH;
   }
 
-
   public void allocateVectors(VectorContainer container, int recordCount) {
     // Allocate memory for the vectors.
     // This will iteratively allocate memory for all nested columns underneath.
@@ -269,10 +280,7 @@ public class RecordBatchMemoryManager {
       RecordBatchSizer.ColumnSize colSize = getColumnSize(w.getField().getName());
       colSize.allocateVector(w.getValueVector(), recordCount);
     }
-  }
-
-  public void allocateVectors(VectorContainer container) {
-    allocateVectors(container, outputRowCount);
+    container.setRecordCount(0);
   }
 
   public void allocateVectors(List<ValueVector> valueVectors, int recordCount) {
@@ -284,6 +292,10 @@ public class RecordBatchMemoryManager {
     }
   }
 
+  public void allocateVectors(VectorContainer container) {
+    allocateVectors(container, outputRowCount);
+  }
+
   public void allocateVectors(List<ValueVector> valueVectors) {
     allocateVectors(valueVectors, outputRowCount);
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchSizer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchSizer.java
index 7e531f8..a5cb05b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchSizer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchSizer.java
@@ -251,12 +251,50 @@ public class RecordBatchSizer {
     }
 
     /**
-     * This returns actual entry size if rowCount > 0 or standard size otherwise.
+     * This returns actual entry size if rowCount > 0 or allocation size otherwise.
      * Use this for the cases when you might get empty batches with schema
      * and you still need to do memory calculations based on just schema.
      */
     public int getAllocSizePerEntry() {
-      return rowCount() == 0 ? getStdNetSizePerEntry() : getNetSizePerEntry();
+      if (rowCount() != 0) {
+        return getNetSizePerEntry();
+      }
+
+      int stdNetSize;
+      try {
+        stdNetSize = TypeHelper.getSize(metadata.getType());
+
+        // TypeHelper estimates 50 bytes for variable length. That is pretty high number
+        // to use as approximation for empty batches. Use 8 instead.
+        switch (metadata.getType().getMinorType()) {
+          case VARBINARY:
+          case VARCHAR:
+          case VAR16CHAR:
+          case VARDECIMAL:
+            stdNetSize = 4 + 8;
+            break;
+        }
+      } catch (Exception e) {
+        stdNetSize = 0;
+      }
+
+      if (isOptional) {
+        stdNetSize += BIT_VECTOR_WIDTH;
+      }
+
+      if (isRepeated) {
+        stdNetSize = (stdNetSize * STD_REPETITION_FACTOR) + OFFSET_VECTOR_WIDTH;
+      }
+
+      for (ColumnSize columnSize : children.values()) {
+        stdNetSize += columnSize.getAllocSizePerEntry();
+      }
+
+      if (isRepeatedList()) {
+        stdNetSize = (stdNetSize * STD_REPETITION_FACTOR) + OFFSET_VECTOR_WIDTH;
+      }
+
+      return stdNetSize;
     }
 
     /**
@@ -777,6 +815,13 @@ public class RecordBatchSizer {
     return (int) Math.ceil((double) num / denom);
   }
 
+  public static int safeDivide(int num, double denom) {
+    if (denom == 0) {
+      return 0;
+    }
+    return (int) Math.ceil((double) num / denom);
+  }
+
   public int rowCount() { return rowCount; }
   public int stdRowWidth() { return stdRowWidth; }
   public int grossRowWidth() { return grossRowWidth; }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestBuildSidePartitioningImpl.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestBuildSidePartitioningImpl.java
index 30c0c73..2a44edb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestBuildSidePartitioningImpl.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestBuildSidePartitioningImpl.java
@@ -39,8 +39,6 @@ public class TestBuildSidePartitioningImpl {
 
     calc.initialize(true,
       false,
-      buildValueSizes,
-      probeValueSizes,
       keySizes,
       200,
       2,
@@ -52,7 +50,7 @@ public class TestBuildSidePartitioningImpl {
       5,
       maxBatchNumRecords,
       maxBatchNumRecords,
-      10,
+      16000,
       .75);
 
     final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
@@ -84,8 +82,6 @@ public class TestBuildSidePartitioningImpl {
 
     calc.initialize(false,
       true,
-      buildValueSizes,
-      probeValueSizes,
       keySizes,
       350,
       2,
@@ -97,7 +93,7 @@ public class TestBuildSidePartitioningImpl {
       5,
       maxBatchNumRecords,
       maxBatchNumRecords,
-      10,
+      16000,
       .75);
 
     final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
@@ -130,8 +126,6 @@ public class TestBuildSidePartitioningImpl {
     calc.initialize(
       true,
       false,
-      buildValueSizes,
-      probeValueSizes,
       keySizes,
       200,
       4,
@@ -143,7 +137,7 @@ public class TestBuildSidePartitioningImpl {
       5,
       maxBatchNumRecords,
       maxBatchNumRecords,
-      10,
+      16000,
       .75);
 
     final HashJoinMemoryCalculator.PartitionStatSet partitionStatSet =
@@ -178,8 +172,6 @@ public class TestBuildSidePartitioningImpl {
     calc.initialize(
       true,
       false,
-      buildValueSizes,
-      probeValueSizes,
       keySizes,
       180,
       2,
@@ -191,7 +183,7 @@ public class TestBuildSidePartitioningImpl {
       5,
       maxBatchNumRecords,
       maxBatchNumRecords,
-      10,
+      16000,
       .75);
 
     final PartitionStatImpl partition1 = new PartitionStatImpl();
@@ -229,8 +221,6 @@ public class TestBuildSidePartitioningImpl {
     calc.initialize(
       true,
       false,
-      buildValueSizes,
-      probeValueSizes,
       keySizes,
       210,
       2,
@@ -242,7 +232,7 @@ public class TestBuildSidePartitioningImpl {
       5,
       maxBatchNumRecords,
       maxBatchNumRecords,
-      10,
+      16000,
       .75);
 
     final PartitionStatImpl partition1 = new PartitionStatImpl();
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestOutputBatchSize.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestOutputBatchSize.java
index 9838670..da83b00 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestOutputBatchSize.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestOutputBatchSize.java
@@ -25,6 +25,7 @@ import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.physical.base.AbstractBase;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.physical.config.FlattenPOP;
+import org.apache.drill.exec.physical.config.HashJoinPOP;
 import org.apache.drill.exec.physical.config.MergeJoinPOP;
 import org.apache.drill.exec.physical.config.UnionAll;
 import org.apache.drill.exec.physical.impl.ScanBatch;
@@ -1353,6 +1354,391 @@ public class TestOutputBatchSize extends PhysicalOpUnitTestBase {
   }
 
   @Test
+  public void testHashJoinMultipleOutputBatches() throws Exception {
+    HashJoinPOP hashJoin = new HashJoinPOP(null, null,
+      Lists.newArrayList(joinCond("c1", "EQUALS", "c2")), JoinRelType.INNER);
+    mockOpContext(hashJoin, initReservation, maxAllocation);
+
+    numRows = 4000 * 2;
+    // create left input rows like this.
+    // "a1" : 5, "b1" : wideString, "c1" : <id>
+    List<String> leftJsonBatches = Lists.newArrayList();
+    StringBuilder leftBatchString = new StringBuilder();
+    leftBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i + "},");
+    }
+    leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows + "}");
+    leftBatchString.append("]");
+
+    leftJsonBatches.add(leftBatchString.toString());
+
+    // create right input rows like this.
+    // "a2" : 6, "b2" : wideString, "c2" : <id>
+    List<String> rightJsonBatches = Lists.newArrayList();
+    StringBuilder rightBatchString = new StringBuilder();
+    rightBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    rightBatchString.append("]");
+    rightJsonBatches.add(rightBatchString.toString());
+
+    // output rows will be like this.
+    // "a1" : 5, "b1" : wideString, "c1" : 1, "a2":6, "b2" : wideString, "c2": 1
+    // "a1" : 5, "b1" : wideString, "c1" : 2, "a2":6, "b2" : wideString, "c2": 2
+    // "a1" : 5, "b1" : wideString, "c1" : 3, "a2":6, "b2" : wideString, "c2": 3
+    List<String> expectedJsonBatches = Lists.newArrayList();
+    StringBuilder expectedBatchString = new StringBuilder();
+    expectedBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      expectedBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i);
+      expectedBatchString.append(", \"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    expectedBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows);
+    expectedBatchString.append(", \"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    expectedBatchString.append("]");
+    expectedJsonBatches.add(expectedBatchString.toString());
+
+    long totalSize = getExpectedSize(expectedJsonBatches);
+
+    // set the output batch size to 1/2 of total size expected.
+    // We will get approximately 4 batches because of fragmentation factor of 2 accounted for in hash join.
+    fragContext.getOptions().setLocalOption("drill.exec.memory.operator.output_batch_size", totalSize/2);
+
+    OperatorTestBuilder opTestBuilder = opTestBuilder()
+      .physicalOperator(hashJoin)
+      .baselineColumns("a1", "b1", "c1", "a2", "b2", "c2")
+      .expectedNumBatches(4)  // verify number of batches
+      .expectedBatchSize(totalSize / 2) // verify batch size
+      .inputDataStreamsJson(Lists.newArrayList(leftJsonBatches, rightJsonBatches));
+
+    for (long i = 0; i < numRows+1; i++) {
+      opTestBuilder.baselineValues(5l, wideString, i, 6l, wideString, i);
+    }
+
+    opTestBuilder.go();
+  }
+
+  @Test
+  public void testHashJoinSingleOutputBatch() throws Exception {
+    HashJoinPOP hashJoin = new HashJoinPOP(null, null,
+      Lists.newArrayList(joinCond("c1", "EQUALS", "c2")), JoinRelType.INNER);
+    mockOpContext(hashJoin, initReservation, maxAllocation);
+
+    // create multiple batches from both sides.
+    numRows = 4096 * 2;
+
+    // create left input rows like this.
+    // "a1" : 5, "b1" : wideString, "c1" : <id>
+    List<String> leftJsonBatches = Lists.newArrayList();
+    StringBuilder leftBatchString = new StringBuilder();
+    leftBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i + "},");
+    }
+    leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows + "}");
+    leftBatchString.append("]");
+
+    leftJsonBatches.add(leftBatchString.toString());
+
+    // create right input rows like this.
+    // "a2" : 6, "b2" : wideString, "c2" : <id>
+    List<String> rightJsonBatches = Lists.newArrayList();
+    StringBuilder rightBatchString = new StringBuilder();
+    rightBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    rightBatchString.append("]");
+    rightJsonBatches.add(rightBatchString.toString());
+
+    // output rows will be like this.
+    // "a1" : 5, "b1" : wideString, "c1" : 1, "a2":6, "b2" : wideString, "c2": 1
+    // "a1" : 5, "b1" : wideString, "c1" : 2, "a2":6, "b2" : wideString, "c2": 2
+    // "a1" : 5, "b1" : wideString, "c1" : 3, "a2":6, "b2" : wideString, "c2": 3
+    List<String> expectedJsonBatches = Lists.newArrayList();
+    StringBuilder expectedBatchString = new StringBuilder();
+    expectedBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      expectedBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i);
+      expectedBatchString.append(", \"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    expectedBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows);
+    expectedBatchString.append(", \"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    expectedBatchString.append("]");
+    expectedJsonBatches.add(expectedBatchString.toString());
+
+    long totalSize = getExpectedSize(expectedJsonBatches);
+
+    // set the output batch size to twice of total size expected.
+    // We should get 1 batch.
+    fragContext.getOptions().setLocalOption("drill.exec.memory.operator.output_batch_size", totalSize*2);
+
+    OperatorTestBuilder opTestBuilder = opTestBuilder()
+      .physicalOperator(hashJoin)
+      .baselineColumns("a1", "b1", "c1", "a2", "b2", "c2")
+      .expectedNumBatches(1)  // verify number of batches
+      .expectedBatchSize(totalSize) // verify batch size
+      .inputDataStreamsJson(Lists.newArrayList(leftJsonBatches, rightJsonBatches));
+
+    for (long i = 0; i < numRows + 1; i++) {
+      opTestBuilder.baselineValues(5l, wideString, i, 6l, wideString, i);
+    }
+
+    opTestBuilder.go();
+  }
+
+  @Test
+  public void testHashJoinUpperLimit() throws Exception {
+    // test the upper limit of 65535 records per batch.
+    HashJoinPOP hashJoin = new HashJoinPOP(null, null,
+      Lists.newArrayList(joinCond("c1", "EQUALS", "c2")), JoinRelType.INNER);
+    mockOpContext(hashJoin, initReservation, maxAllocation);
+
+    numRows = 100000;
+
+    // create left input rows like this.
+    // "a1" : 5,  "c1" : <id>
+    List<String> leftJsonBatches = Lists.newArrayList();
+    StringBuilder leftBatchString = new StringBuilder();
+    leftBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      leftBatchString.append("{\"a1\": 5, " + "\"c1\" : " + i + "},");
+    }
+    leftBatchString.append("{\"a1\": 5, " + "\"c1\" : " + numRows + "}");
+    leftBatchString.append("]");
+
+    leftJsonBatches.add(leftBatchString.toString());
+
+    // create right input rows like this.
+    // "a2" : 6, "c2" : <id>
+    List<String> rightJsonBatches = Lists.newArrayList();
+    StringBuilder rightBatchString = new StringBuilder();
+    rightBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      rightBatchString.append("{\"a2\": 6, " + "\"c2\" : " + i + "},");
+    }
+    rightBatchString.append("{\"a2\": 6, " + "\"c2\" : " + numRows + "}");
+    rightBatchString.append("]");
+    rightJsonBatches.add(rightBatchString.toString());
+
+    // output rows will be like this.
+    // "a1" : 5,  "c1" : 1, "a2":6,  "c2": 1
+    // "a1" : 5,  "c1" : 2, "a2":6,  "c2": 2
+    // "a1" : 5,  "c1" : 3, "a2":6,  "c2": 3
+
+    // expect two batches, batch limited by 65535 records
+    OperatorTestBuilder opTestBuilder = opTestBuilder()
+      .physicalOperator(hashJoin)
+      .baselineColumns("a1", "c1", "a2", "c2")
+      .expectedNumBatches(2)  // verify number of batches
+      .inputDataStreamsJson(Lists.newArrayList(leftJsonBatches, rightJsonBatches));
+
+    for (long i = 0; i < numRows + 1; i++) {
+      opTestBuilder.baselineValues(5l, i, 6l, i);
+    }
+
+    opTestBuilder.go();
+  }
+
+  @Test
+  public void testHashJoinLowerLimit() throws Exception {
+    // test the lower limit of at least one batch
+    HashJoinPOP hashJoin = new HashJoinPOP(null, null,
+      Lists.newArrayList(joinCond("c1", "EQUALS", "c2")), JoinRelType.INNER);
+    mockOpContext(hashJoin, initReservation, maxAllocation);
+
+    numRows = 10;
+
+    // create left input rows like this.
+    // "a1" : 5, "b1" : wideString, "c1" : <id>
+    List<String> leftJsonBatches = Lists.newArrayList();
+    StringBuilder leftBatchString = new StringBuilder();
+    leftBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i + "},");
+    }
+    leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows + "}");
+    leftBatchString.append("]");
+
+    leftJsonBatches.add(leftBatchString.toString());
+
+    // create right input rows like this.
+    // "a2" : 6, "b2" : wideString, "c2" : <id>
+    List<String> rightJsonBatches = Lists.newArrayList();
+    StringBuilder rightBatchString = new StringBuilder();
+    rightBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    rightBatchString.append("]");
+    rightJsonBatches.add(rightBatchString.toString());
+
+    // output rows will be like this.
+    // "a1" : 5, "b1" : wideString, "c1" : 1, "a2":6, "b2" : wideString, "c2": 1
+    // "a1" : 5, "b1" : wideString, "c1" : 2, "a2":6, "b2" : wideString, "c2": 2
+    // "a1" : 5, "b1" : wideString, "c1" : 3, "a2":6, "b2" : wideString, "c2": 3
+
+    // set very low value of output batch size so we can do only one row per batch.
+    fragContext.getOptions().setLocalOption("drill.exec.memory.operator.output_batch_size", 128);
+
+    OperatorTestBuilder opTestBuilder = opTestBuilder()
+      .physicalOperator(hashJoin)
+      .baselineColumns("a1", "b1", "c1", "a2", "b2", "c2")
+      .expectedNumBatches(10)  // verify number of batches
+      .inputDataStreamsJson(Lists.newArrayList(leftJsonBatches, rightJsonBatches));
+
+    for (long i = 0; i < numRows + 1; i++) {
+      opTestBuilder.baselineValues(5l, wideString, i, 6l, wideString, i);
+    }
+
+    opTestBuilder.go();
+  }
+
+  @Test
+  public void testRightOuterHashJoin() throws Exception {
+
+    HashJoinPOP hashJoin = new HashJoinPOP(null, null,
+      Lists.newArrayList(joinCond("c1", "EQUALS", "c2")), JoinRelType.RIGHT);
+    mockOpContext(hashJoin, initReservation, maxAllocation);
+
+    numRows = 4000 * 2;
+    // create left input rows like this.
+    // "a1" : 5, "b1" : wideString, "c1" : <id>
+    List<String> leftJsonBatches = Lists.newArrayList();
+    StringBuilder leftBatchString = new StringBuilder();
+    leftBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i + "},");
+    }
+    leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows + "}");
+    leftBatchString.append("]");
+
+    leftJsonBatches.add(leftBatchString.toString());
+
+    // create right input rows like this.
+    // "a2" : 6, "b2" : wideString, "c2" : <id>
+    List<String> rightJsonBatches = Lists.newArrayList();
+    StringBuilder rightBatchString = new StringBuilder();
+    rightBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    rightBatchString.append("]");
+    rightJsonBatches.add(rightBatchString.toString());
+
+    // output rows will be like this.
+    // "a1" : 5, "b1" : wideString, "c1" : 1, "a2":6, "b2" : wideString, "c2": 1
+    // "a1" : 5, "b1" : wideString, "c1" : 2, "a2":6, "b2" : wideString, "c2": 2
+    // "a1" : 5, "b1" : wideString, "c1" : 3, "a2":6, "b2" : wideString, "c2": 3
+    List<String> expectedJsonBatches = Lists.newArrayList();
+    StringBuilder expectedBatchString = new StringBuilder();
+    expectedBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      expectedBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i);
+      expectedBatchString.append(", \"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    expectedBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows);
+    expectedBatchString.append(", \"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    expectedBatchString.append("]");
+    expectedJsonBatches.add(expectedBatchString.toString());
+
+    long totalSize = getExpectedSize(expectedJsonBatches);
+
+    // set the output batch size to 1/2 of total size expected.
+    // We will get approximately 4 batches because of fragmentation factor of 2 accounted for in hash join.
+    fragContext.getOptions().setLocalOption("drill.exec.memory.operator.output_batch_size", totalSize/2);
+
+    OperatorTestBuilder opTestBuilder = opTestBuilder()
+      .physicalOperator(hashJoin)
+      .baselineColumns("a1", "b1", "c1", "a2", "b2", "c2")
+      .expectedNumBatches(4)  // verify number of batches
+      .expectedBatchSize(totalSize / 2) // verify batch size
+      .inputDataStreamsJson(Lists.newArrayList(leftJsonBatches, rightJsonBatches));
+
+    for (long i = 0; i < numRows + 1; i++) {
+      opTestBuilder.baselineValues(5l, wideString, i, 6l, wideString, i);
+    }
+
+    opTestBuilder.go();
+  }
+
+  @Test
+  public void testLeftOuterHashJoin() throws Exception {
+
+    HashJoinPOP hashJoin = new HashJoinPOP(null, null,
+      Lists.newArrayList(joinCond("c1", "EQUALS", "c2")), JoinRelType.LEFT);
+    mockOpContext(hashJoin, initReservation, maxAllocation);
+
+    numRows = 4000 * 2;
+    // create left input rows like this.
+    // "a1" : 5, "b1" : wideString, "c1" : <id>
+    List<String> leftJsonBatches = Lists.newArrayList();
+    StringBuilder leftBatchString = new StringBuilder();
+    leftBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i + "},");
+    }
+    leftBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows + "}");
+    leftBatchString.append("]");
+
+    leftJsonBatches.add(leftBatchString.toString());
+
+    // create right input rows like this.
+    // "a2" : 6, "b2" : wideString, "c2" : <id>
+    List<String> rightJsonBatches = Lists.newArrayList();
+    StringBuilder rightBatchString = new StringBuilder();
+    rightBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    rightBatchString.append("{\"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    rightBatchString.append("]");
+    rightJsonBatches.add(rightBatchString.toString());
+
+    // output rows will be like this.
+    // "a1" : 5, "b1" : wideString, "c1" : 1, "a2":6, "b2" : wideString, "c2": 1
+    // "a1" : 5, "b1" : wideString, "c1" : 2, "a2":6, "b2" : wideString, "c2": 2
+    // "a1" : 5, "b1" : wideString, "c1" : 3, "a2":6, "b2" : wideString, "c2": 3
+    List<String> expectedJsonBatches = Lists.newArrayList();
+    StringBuilder expectedBatchString = new StringBuilder();
+    expectedBatchString.append("[");
+    for (int i = 0; i < numRows; i++) {
+      expectedBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + i);
+      expectedBatchString.append(", \"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + i + "},");
+    }
+    expectedBatchString.append("{\"a1\": 5, " + "\"b1\" : " + "\"" + wideString + "\"," + "\"c1\" : " + numRows);
+    expectedBatchString.append(", \"a2\": 6, " + "\"b2\" : " + "\"" + wideString + "\"," + "\"c2\" : " + numRows + "}");
+    expectedBatchString.append("]");
+    expectedJsonBatches.add(expectedBatchString.toString());
+
+    long totalSize = getExpectedSize(expectedJsonBatches);
+
+    // set the output batch size to 1/2 of total size expected.
+    // We will get approximately 4 batches because of fragmentation factor of 2 accounted for in hash join.
+    fragContext.getOptions().setLocalOption("drill.exec.memory.operator.output_batch_size", totalSize/2);
+
+    OperatorTestBuilder opTestBuilder = opTestBuilder()
+      .physicalOperator(hashJoin)
+      .baselineColumns("a1", "b1", "c1", "a2", "b2", "c2")
+      .expectedNumBatches(4)  // verify number of batches
+      .expectedBatchSize(totalSize / 2) // verify batch size
+      .inputDataStreamsJson(Lists.newArrayList(leftJsonBatches, rightJsonBatches));
+
+    for (long i = 0; i < numRows+1; i++) {
+      opTestBuilder.baselineValues(5l, wideString, i, 6l, wideString, i);
+    }
+
+    opTestBuilder.go();
+
+  }
+
+  @Test
   public void testSizerRepeatedList() throws Exception {
     List<String> inputJsonBatches = Lists.newArrayList();
     StringBuilder batchString = new StringBuilder();

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.

[drill] 06/10: DRILL-6450: Visualized plans for profiles querying JDBC sources is broken

Posted by pa...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

parthc pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git

commit 494d828f06ddda9c8091920cf56731bbd00abab0
Author: Kunal Khatua <ku...@apache.org>
AuthorDate: Tue May 29 11:20:56 2018 -0700

    DRILL-6450: Visualized plans for profiles querying JDBC sources is broken
    
    When viewing a profile for a query against a JDBC source, the visualized plan is not rendered. This is because the generated SQL pushed down to the JDBC source has a line break injected just before the FROM clause.
    
    The workaround is to strip away any injected newlines ('\\n') at least for the SQL defined in the text plan, so that the backend Javascript can render it correctly.
    In addition, any single line comments are also removed, but any block comments (i.e. /* .. */ ) are retained as they might carry hints.
    
    This closes #1295
---
 .../java/org/apache/drill/exec/store/jdbc/JdbcPrel.java   | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcPrel.java b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcPrel.java
index abeca23..ac6f31c 100644
--- a/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcPrel.java
+++ b/contrib/storage-jdbc/src/main/java/org/apache/drill/exec/store/jdbc/JdbcPrel.java
@@ -44,7 +44,6 @@ import org.apache.drill.exec.store.jdbc.JdbcStoragePlugin.DrillJdbcConvention;
  * Represents a JDBC Plan once the children nodes have been rewritten into SQL.
  */
 public class JdbcPrel extends AbstractRelNode implements Prel {
-
   private final String sql;
   private final double rows;
   private final DrillJdbcConvention convention;
@@ -66,6 +65,18 @@ public class JdbcPrel extends AbstractRelNode implements Prel {
     rowType = input.getRowType();
   }
 
+  //Substitute newline. Also stripping away single line comments. Expecting hints to be nested in '/* <hint> */'
+  private String stripToOneLineSql(String sql) {
+    StringBuilder strippedSqlTextBldr = new StringBuilder(sql.length());
+    String sqlToken[] = sql.split("\\n");
+    for (String sqlTextLine : sqlToken) {
+      if (!sqlTextLine.trim().startsWith("--")) { //Skip comments
+        strippedSqlTextBldr.append(sqlTextLine).append(' ');
+      }
+    }
+    return strippedSqlTextBldr.toString();
+  }
+
   private class SubsetRemover extends RelShuttleImpl {
 
     @Override
@@ -87,7 +98,7 @@ public class JdbcPrel extends AbstractRelNode implements Prel {
 
   @Override
   public RelWriter explainTerms(RelWriter pw) {
-    return super.explainTerms(pw).item("sql", sql);
+    return super.explainTerms(pw).item("sql", stripToOneLineSql(sql));
   }
 
   @Override

-- 
To stop receiving notification emails like this one, please contact
parthc@apache.org.