Posted to commits@hive.apache.org by ay...@apache.org on 2022/05/19 04:16:42 UTC

[hive] branch master updated: HIVE-26223: Integrate ESRI GeoSpatial UDFs. (#3283). (Ayush Saxena, reviewed by Mahesh Kumar Behera)

This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 6bfb6f6a367 HIVE-26223: Integrate ESRI GeoSpatial UDFs. (#3283). (Ayush Saxena, reviewed by Mahesh Kumar Behera)
6bfb6f6a367 is described below

commit 6bfb6f6a3676ac692d92a08b88f439794f20b488
Author: Ayush Saxena <ay...@apache.org>
AuthorDate: Thu May 19 09:46:35 2022 +0530

    HIVE-26223: Integrate ESRI GeoSpatial UDFs. (#3283). (Ayush Saxena, reviewed by Mahesh Kumar Behera)
---
 ql/pom.xml                                         |   7 +
 .../hadoop/hive/ql/exec/FunctionRegistry.java      | 171 ++++++++
 .../apache/hadoop/hive/ql/udf/esri/BinUtils.java   |  93 ++++
 .../hadoop/hive/ql/udf/esri/GeometryUtils.java     | 317 ++++++++++++++
 .../hadoop/hive/ql/udf/esri/HiveGeometry.java      |  21 +
 .../hive/ql/udf/esri/HiveGeometryOIHelper.java     | 176 ++++++++
 .../apache/hadoop/hive/ql/udf/esri/LogUtils.java   | 113 +++++
 .../hive/ql/udf/esri/ST_Aggr_ConvexHull.java       | 144 +++++++
 .../hive/ql/udf/esri/ST_Aggr_Intersection.java     | 111 +++++
 .../hadoop/hive/ql/udf/esri/ST_Aggr_Union.java     | 126 ++++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Area.java    |  70 +++
 .../hadoop/hive/ql/udf/esri/ST_AsBinary.java       |  65 +++
 .../hadoop/hive/ql/udf/esri/ST_AsGeoJson.java      |  76 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_AsJson.java  |  68 +++
 .../apache/hadoop/hive/ql/udf/esri/ST_AsShape.java |  58 +++
 .../apache/hadoop/hive/ql/udf/esri/ST_AsText.java  |  99 +++++
 .../org/apache/hadoop/hive/ql/udf/esri/ST_Bin.java |  90 ++++
 .../hadoop/hive/ql/udf/esri/ST_BinEnvelope.java    | 118 ++++++
 .../hadoop/hive/ql/udf/esri/ST_Boundary.java       |  74 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Buffer.java  |  50 +++
 .../hadoop/hive/ql/udf/esri/ST_Centroid.java       |  53 +++
 .../hadoop/hive/ql/udf/esri/ST_Contains.java       |  42 ++
 .../hadoop/hive/ql/udf/esri/ST_ConvexHull.java     | 109 +++++
 .../hadoop/hive/ql/udf/esri/ST_CoordDim.java       |  72 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Crosses.java |  41 ++
 .../hadoop/hive/ql/udf/esri/ST_Difference.java     |  76 ++++
 .../hadoop/hive/ql/udf/esri/ST_Dimension.java      |  85 ++++
 .../hadoop/hive/ql/udf/esri/ST_Disjoint.java       |  41 ++
 .../hadoop/hive/ql/udf/esri/ST_Distance.java       |  78 ++++
 .../hadoop/hive/ql/udf/esri/ST_EndPoint.java       |  67 +++
 .../hadoop/hive/ql/udf/esri/ST_EnvIntersects.java  |  84 ++++
 .../hadoop/hive/ql/udf/esri/ST_Envelope.java       |  78 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Equals.java  |  40 ++
 .../hadoop/hive/ql/udf/esri/ST_ExteriorRing.java   |  94 +++++
 .../hive/ql/udf/esri/ST_GeodesicLengthWGS84.java   | 102 +++++
 .../hadoop/hive/ql/udf/esri/ST_GeomCollection.java |  82 ++++
 .../hive/ql/udf/esri/ST_GeomFromGeoJson.java       | 112 +++++
 .../hadoop/hive/ql/udf/esri/ST_GeomFromJson.java   | 108 +++++
 .../hadoop/hive/ql/udf/esri/ST_GeomFromShape.java  |  72 ++++
 .../hadoop/hive/ql/udf/esri/ST_GeomFromText.java   |  88 ++++
 .../hadoop/hive/ql/udf/esri/ST_GeomFromWKB.java    |  95 +++++
 .../hadoop/hive/ql/udf/esri/ST_Geometry.java       |  24 ++
 .../hive/ql/udf/esri/ST_GeometryAccessor.java      |  26 ++
 .../hadoop/hive/ql/udf/esri/ST_GeometryN.java      |  81 ++++
 .../hive/ql/udf/esri/ST_GeometryProcessing.java    |  22 +
 .../hive/ql/udf/esri/ST_GeometryRelational.java    | 111 +++++
 .../hadoop/hive/ql/udf/esri/ST_GeometryType.java   |  70 +++
 .../hadoop/hive/ql/udf/esri/ST_InteriorRingN.java  |  76 ++++
 .../hadoop/hive/ql/udf/esri/ST_Intersection.java   |  67 +++
 .../hadoop/hive/ql/udf/esri/ST_Intersects.java     |  41 ++
 .../apache/hadoop/hive/ql/udf/esri/ST_Is3D.java    |  74 ++++
 .../hadoop/hive/ql/udf/esri/ST_IsClosed.java       | 104 +++++
 .../apache/hadoop/hive/ql/udf/esri/ST_IsEmpty.java |  76 ++++
 .../hadoop/hive/ql/udf/esri/ST_IsMeasured.java     |  74 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_IsRing.java  |  88 ++++
 .../hadoop/hive/ql/udf/esri/ST_IsSimple.java       |  82 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Length.java  |  66 +++
 .../hadoop/hive/ql/udf/esri/ST_LineFromWKB.java    |  79 ++++
 .../hadoop/hive/ql/udf/esri/ST_LineString.java     | 159 +++++++
 .../org/apache/hadoop/hive/ql/udf/esri/ST_M.java   |  82 ++++
 .../hadoop/hive/ql/udf/esri/ST_MLineFromWKB.java   |  80 ++++
 .../hadoop/hive/ql/udf/esri/ST_MPointFromWKB.java  |  80 ++++
 .../hadoop/hive/ql/udf/esri/ST_MPolyFromWKB.java   |  80 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_MaxM.java    |  80 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_MaxX.java    |  86 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_MaxY.java    |  86 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_MaxZ.java    |  80 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_MinM.java    |  80 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_MinX.java    |  86 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_MinY.java    |  86 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_MinZ.java    |  80 ++++
 .../hive/ql/udf/esri/ST_MultiLineString.java       | 107 +++++
 .../hadoop/hive/ql/udf/esri/ST_MultiPoint.java     |  97 +++++
 .../hadoop/hive/ql/udf/esri/ST_MultiPolygon.java   | 118 ++++++
 .../hadoop/hive/ql/udf/esri/ST_NumGeometries.java  |  83 ++++
 .../hive/ql/udf/esri/ST_NumInteriorRing.java       |  79 ++++
 .../hadoop/hive/ql/udf/esri/ST_NumPoints.java      |  99 +++++
 .../hadoop/hive/ql/udf/esri/ST_Overlaps.java       |  41 ++
 .../apache/hadoop/hive/ql/udf/esri/ST_Point.java   |  99 +++++
 .../hadoop/hive/ql/udf/esri/ST_PointFromWKB.java   |  79 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_PointN.java  | 106 +++++
 .../apache/hadoop/hive/ql/udf/esri/ST_PointZ.java  |  45 ++
 .../hadoop/hive/ql/udf/esri/ST_PolyFromWKB.java    |  79 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Polygon.java | 103 +++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Relate.java  |  67 +++
 .../apache/hadoop/hive/ql/udf/esri/ST_SRID.java    |  52 +++
 .../apache/hadoop/hive/ql/udf/esri/ST_SetSRID.java |  52 +++
 .../hadoop/hive/ql/udf/esri/ST_StartPoint.java     |  67 +++
 .../hadoop/hive/ql/udf/esri/ST_SymmetricDiff.java  |  82 ++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Touches.java |  41 ++
 .../apache/hadoop/hive/ql/udf/esri/ST_Union.java   | 117 +++++
 .../apache/hadoop/hive/ql/udf/esri/ST_Within.java  |  41 ++
 .../org/apache/hadoop/hive/ql/udf/esri/ST_X.java   |  74 ++++
 .../org/apache/hadoop/hive/ql/udf/esri/ST_Y.java   |  74 ++++
 .../org/apache/hadoop/hive/ql/udf/esri/ST_Z.java   |  86 ++++
 .../hive/ql/udf/esri/serde/BaseJsonSerDe.java      | 441 +++++++++++++++++++
 .../hive/ql/udf/esri/serde/EsriJsonSerDe.java      |  37 ++
 .../hive/ql/udf/esri/serde/GeoJsonSerDe.java       |  55 +++
 .../hadoop/hive/ql/udf/esri/serde/JsonSerde.java   |  21 +
 .../hadoop/hive/ql/udf/esri/shims/HiveShims.java   | 193 +++++++++
 .../hadoop/hive/ql/udf/esri/TestStAsShape.java     |  57 +++
 .../hadoop/hive/ql/udf/esri/TestStCentroid.java    | 104 +++++
 .../hive/ql/udf/esri/TestStGeomFromShape.java      | 223 ++++++++++
 .../hive/ql/udf/esri/TestStGeometryType.java       |  66 +++
 .../hadoop/hive/ql/udf/esri/TestStLineString.java  |  70 +++
 .../apache/hadoop/hive/ql/udf/esri/TestStMinX.java |  47 +++
 .../apache/hadoop/hive/ql/udf/esri/TestStMinY.java |  38 ++
 .../hadoop/hive/ql/udf/esri/TestStMultiPoint.java  |  40 ++
 .../hive/ql/udf/esri/TestStMultiPolygon.java       | 117 +++++
 .../hadoop/hive/ql/udf/esri/TestStPoint.java       |  58 +++
 .../apache/hadoop/hive/ql/udf/esri/TestStX.java    |  41 ++
 .../apache/hadoop/hive/ql/udf/esri/TestStY.java    |  41 ++
 .../ql/udf/esri/serde/JsonSerDeTestingBase.java    | 195 +++++++++
 .../hive/ql/udf/esri/serde/TestEsriJsonSerDe.java  | 470 +++++++++++++++++++++
 .../hive/ql/udf/esri/serde/TestGeoJsonSerDe.java   | 333 +++++++++++++++
 .../queries/clientpositive/geospatial_binary.q     |  21 +
 .../queries/clientpositive/geospatial_decimal.q    |  23 +
 .../clientpositive/llap/geospatial_binary.q.out    |  52 +++
 .../clientpositive/llap/geospatial_decimal.q.out   |  56 +++
 .../clientpositive/llap/show_functions.q.out       | 174 ++++++++
 120 files changed, 10833 insertions(+)

diff --git a/ql/pom.xml b/ql/pom.xml
index 018f32f77bf..9f2662b3988 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -849,6 +849,12 @@
         </exclusion>
       </exclusions>
     </dependency>
+    <dependency>
+      <groupId>com.esri.geometry</groupId>
+      <artifactId>esri-geometry-api</artifactId>
+      <scope>compile</scope>
+      <version>2.2.4</version>
+    </dependency>
   </dependencies>
   <profiles>
     <profile>
@@ -1063,6 +1069,7 @@
                   <include>org.apache.datasketches:*</include>
                   <include>org.apache.calcite:*</include>
                   <include>org.apache.calcite.avatica:avatica</include>
+                  <include>com.esri.geometry:esri-geometry-api</include>
                 </includes>
               </artifactSet>
               <filters>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 92382bde402..3fc5fd45626 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -32,6 +32,89 @@ import java.util.TreeSet;
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
+import org.apache.hadoop.hive.ql.udf.esri.ST_Aggr_ConvexHull;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Aggr_Union;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Area;
+import org.apache.hadoop.hive.ql.udf.esri.ST_AsBinary;
+import org.apache.hadoop.hive.ql.udf.esri.ST_AsGeoJson;
+import org.apache.hadoop.hive.ql.udf.esri.ST_AsJson;
+import org.apache.hadoop.hive.ql.udf.esri.ST_AsShape;
+import org.apache.hadoop.hive.ql.udf.esri.ST_AsText;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Bin;
+import org.apache.hadoop.hive.ql.udf.esri.ST_BinEnvelope;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Boundary;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Buffer;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Centroid;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Contains;
+import org.apache.hadoop.hive.ql.udf.esri.ST_ConvexHull;
+import org.apache.hadoop.hive.ql.udf.esri.ST_CoordDim;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Crosses;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Difference;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Dimension;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Disjoint;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Distance;
+import org.apache.hadoop.hive.ql.udf.esri.ST_EndPoint;
+import org.apache.hadoop.hive.ql.udf.esri.ST_EnvIntersects;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Envelope;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Equals;
+import org.apache.hadoop.hive.ql.udf.esri.ST_ExteriorRing;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeodesicLengthWGS84;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeomCollection;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeomFromGeoJson;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeomFromJson;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeomFromShape;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeomFromText;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeomFromWKB;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeometryN;
+import org.apache.hadoop.hive.ql.udf.esri.ST_GeometryProcessing;
+import org.apache.hadoop.hive.ql.udf.esri.ST_InteriorRingN;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Intersection;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Intersects;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Is3D;
+import org.apache.hadoop.hive.ql.udf.esri.ST_IsClosed;
+import org.apache.hadoop.hive.ql.udf.esri.ST_IsEmpty;
+import org.apache.hadoop.hive.ql.udf.esri.ST_IsMeasured;
+import org.apache.hadoop.hive.ql.udf.esri.ST_IsRing;
+import org.apache.hadoop.hive.ql.udf.esri.ST_IsSimple;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Length;
+import org.apache.hadoop.hive.ql.udf.esri.ST_LineFromWKB;
+import org.apache.hadoop.hive.ql.udf.esri.ST_LineString;
+import org.apache.hadoop.hive.ql.udf.esri.ST_M;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MLineFromWKB;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MPointFromWKB;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MPolyFromWKB;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MaxM;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MaxX;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MaxY;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MaxZ;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MinM;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MinX;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MinY;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MinZ;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MultiLineString;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MultiPoint;
+import org.apache.hadoop.hive.ql.udf.esri.ST_MultiPolygon;
+import org.apache.hadoop.hive.ql.udf.esri.ST_NumGeometries;
+import org.apache.hadoop.hive.ql.udf.esri.ST_NumInteriorRing;
+import org.apache.hadoop.hive.ql.udf.esri.ST_NumPoints;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Overlaps;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Point;
+import org.apache.hadoop.hive.ql.udf.esri.ST_PointFromWKB;
+import org.apache.hadoop.hive.ql.udf.esri.ST_PointN;
+import org.apache.hadoop.hive.ql.udf.esri.ST_PointZ;
+import org.apache.hadoop.hive.ql.udf.esri.ST_PolyFromWKB;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Polygon;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Relate;
+import org.apache.hadoop.hive.ql.udf.esri.ST_SRID;
+import org.apache.hadoop.hive.ql.udf.esri.ST_SetSRID;
+import org.apache.hadoop.hive.ql.udf.esri.ST_StartPoint;
+import org.apache.hadoop.hive.ql.udf.esri.ST_SymmetricDiff;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Touches;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Union;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Within;
+import org.apache.hadoop.hive.ql.udf.esri.ST_X;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Y;
+import org.apache.hadoop.hive.ql.udf.esri.ST_Z;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -586,6 +669,94 @@ public final class FunctionRegistry {
     system.registerGenericUDF(GenericUDFMaskShowLastN.UDF_NAME, GenericUDFMaskShowLastN.class);
     system.registerGenericUDF(GenericUDFMaskHash.UDF_NAME, GenericUDFMaskHash.class);
 
+    // GeoSpatial UDFs
+    system.registerFunction("ST_Length", ST_Length.class);
+    system.registerFunction("ST_LineString", ST_LineString.class);
+    system.registerFunction("ST_Point", ST_Point.class);
+    system.registerFunction("ST_AsText", ST_AsText.class);
+    system.registerFunction("ST_Aggr_ConvexHull", ST_Aggr_ConvexHull.class);
+    system.registerFunction("ST_Aggr_Union", ST_Aggr_Union.class);
+    system.registerFunction("ST_Area", ST_Area.class);
+    system.registerFunction("ST_AsBinary", ST_AsBinary.class);
+    system.registerFunction("ST_AsGeoJson", ST_AsGeoJson.class);
+    system.registerFunction("ST_AsJson", ST_AsJson.class);
+    system.registerFunction("ST_AsShape", ST_AsShape.class);
+    system.registerFunction("ST_Bin", ST_Bin.class);
+    system.registerFunction("ST_BinEnvelope", ST_BinEnvelope.class);
+    system.registerFunction("ST_Boundary", ST_Boundary.class);
+    system.registerFunction("ST_Buffer", ST_Buffer.class);
+    system.registerFunction("ST_Centroid", ST_Centroid.class);
+    system.registerFunction("ST_Contains", ST_Contains.class);
+    system.registerFunction("ST_ConvexHull", ST_ConvexHull.class);
+    system.registerFunction("ST_CoordDim", ST_CoordDim.class);
+    system.registerFunction("ST_Crosses", ST_Crosses.class);
+    system.registerFunction("ST_Difference", ST_Difference.class);
+    system.registerFunction("ST_Dimension", ST_Dimension.class);
+    system.registerFunction("ST_Disjoint", ST_Disjoint.class);
+    system.registerFunction("ST_Distance", ST_Distance.class);
+    system.registerFunction("ST_EndPoint", ST_EndPoint.class);
+    system.registerFunction("ST_Envelope", ST_Envelope.class);
+    system.registerFunction("ST_EnvIntersects", ST_EnvIntersects.class);
+    system.registerFunction("ST_Equals", ST_Equals.class);
+    system.registerFunction("ST_ExteriorRing", ST_ExteriorRing.class);
+    system.registerFunction("ST_GeodesicLengthWGS84", ST_GeodesicLengthWGS84.class);
+    system.registerFunction("ST_GeomCollection", ST_GeomCollection.class);
+    system.registerFunction("ST_GeometryN", ST_GeometryN.class);
+    system.registerFunction("ST_GeometryProcessing", ST_GeometryProcessing.class);
+    system.registerFunction("ST_GeomFromGeoJson", ST_GeomFromGeoJson.class);
+    system.registerFunction("ST_GeomFromJson", ST_GeomFromJson.class);
+    system.registerFunction("ST_GeomFromShape", ST_GeomFromShape.class);
+    system.registerFunction("ST_GeomFromText", ST_GeomFromText.class);
+    system.registerFunction("ST_GeomFromWKB", ST_GeomFromWKB.class);
+    system.registerFunction("ST_InteriorRingN", ST_InteriorRingN.class);
+    system.registerFunction("ST_Intersection", ST_Intersection.class);
+    system.registerFunction("ST_Intersects", ST_Intersects.class);
+    system.registerFunction("ST_Is3D", ST_Is3D.class);
+    system.registerFunction("ST_IsClosed", ST_IsClosed.class);
+    system.registerFunction("ST_IsEmpty", ST_IsEmpty.class);
+    system.registerFunction("ST_IsMeasured", ST_IsMeasured.class);
+    system.registerFunction("ST_IsRing", ST_IsRing.class);
+    system.registerFunction("ST_IsSimple", ST_IsSimple.class);
+    system.registerFunction("ST_LineFromWKB", ST_LineFromWKB.class);
+    system.registerFunction("ST_M", ST_M.class);
+    system.registerFunction("ST_MaxM", ST_MaxM.class);
+    system.registerFunction("ST_MaxX", ST_MaxX.class);
+
+    system.registerFunction("ST_MaxY", ST_MaxY.class);
+    system.registerFunction("ST_MaxZ", ST_MaxZ.class);
+    system.registerFunction("ST_MinM", ST_MinM.class);
+    system.registerFunction("ST_MinX", ST_MinX.class);
+    system.registerFunction("ST_MinY", ST_MinY.class);
+    system.registerFunction("ST_MinZ", ST_MinZ.class);
+    system.registerFunction("ST_MLineFromWKB", ST_MLineFromWKB.class);
+    system.registerFunction("ST_MPointFromWKB", ST_MPointFromWKB.class);
+    system.registerFunction("ST_MPolyFromWKB", ST_MPolyFromWKB.class);
+    system.registerFunction("ST_MultiLineString", ST_MultiLineString.class);
+    system.registerFunction("ST_MultiPoint", ST_MultiPoint.class);
+    system.registerFunction("ST_MultiPolygon", ST_MultiPolygon.class);
+    system.registerFunction("ST_NumGeometries", ST_NumGeometries.class);
+    system.registerFunction("ST_NumInteriorRing", ST_NumInteriorRing.class);
+    system.registerFunction("ST_NumPoints", ST_NumPoints.class);
+    system.registerFunction("ST_Overlaps", ST_Overlaps.class);
+    system.registerFunction("ST_PointFromWKB", ST_PointFromWKB.class);
+    system.registerFunction("ST_PointN", ST_PointN.class);
+
+    system.registerFunction("ST_PointZ", ST_PointZ.class);
+    system.registerFunction("ST_PolyFromWKB", ST_PolyFromWKB.class);
+    system.registerFunction("ST_Polygon", ST_Polygon.class);
+    system.registerFunction("ST_Relate", ST_Relate.class);
+    system.registerFunction("ST_SetSRID", ST_SetSRID.class);
+    system.registerFunction("ST_SRID", ST_SRID.class);
+    system.registerFunction("ST_StartPoint", ST_StartPoint.class);
+    system.registerFunction("ST_SymmetricDiff", ST_SymmetricDiff.class);
+    system.registerFunction("ST_Touches", ST_Touches.class);
+    system.registerFunction("ST_Union", ST_Union.class);
+    system.registerFunction("ST_Within", ST_Within.class);
+    system.registerFunction("ST_X", ST_X.class);
+    system.registerFunction("ST_Y", ST_Y.class);
+    system.registerFunction("ST_Z", ST_Z.class);
+
     try {
       system.registerGenericUDF("iceberg_bucket",
           (Class<? extends GenericUDF>) Class.forName("org.apache.iceberg.mr.hive.GenericUDFIcebergBucket"));
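
The hunk above registers each new ST_* class as a Hive built-in, so the functions resolve by name at query compile time with no CREATE FUNCTION step. A minimal sketch of checking that resolution, assuming the long-standing FunctionRegistry.getFunctionInfo(String) lookup; GeoUdfRegistrationCheck is a hypothetical class, not part of this commit:

    import org.apache.hadoop.hive.ql.exec.FunctionInfo;
    import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
    import org.apache.hadoop.hive.ql.parse.SemanticException;

    public class GeoUdfRegistrationCheck {
      public static void main(String[] args) throws SemanticException {
        // Built-in lookup is case-insensitive; each name below is registered above.
        for (String name : new String[] { "st_point", "ST_AsText", "st_aggr_union" }) {
          FunctionInfo info = FunctionRegistry.getFunctionInfo(name);
          System.out.println(name + " -> " + (info != null ? info.getDisplayName() : "not registered"));
        }
      }
    }
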
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/BinUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/BinUtils.java
new file mode 100755
index 00000000000..9ed75a358b4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/BinUtils.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Envelope;
+
+public class BinUtils {
+  final long numCols;
+  final double extentMin;
+  final double extentMax;
+  final double binSize;
+
+  public BinUtils(double binSize) {
+    this.binSize = binSize;
+
+    // absolute max number of rows/columns we can have
+    long maxBinsPerAxis = (long) Math.sqrt(Long.MAX_VALUE);
+
+    // a smaller binSize gives us a smaller extent width and height that
+    // can be addressed by a single 64 bit long
+    double size = (binSize < 1) ? maxBinsPerAxis * binSize : maxBinsPerAxis;
+
+    extentMax = size / 2;
+    extentMin = extentMax - size;
+    numCols = (long) (Math.ceil(size / binSize));
+  }
+
+  /**
+   * Gets the bin ID for a point.
+   *
+   * @param x x-coordinate of the point
+   * @param y y-coordinate of the point
+   * @return row-major ID of the bin containing the point
+   */
+  public long getId(double x, double y) {
+    double down = (extentMax - y) / binSize;
+    double over = (x - extentMin) / binSize;
+
+    return ((long) down * numCols) + (long) over;
+  }
+
+  /**
+   * Gets the envelope for the given bin ID.
+   *
+   * @param binId bin ID whose extent is wanted
+   * @param envelope envelope that is updated in place with the bin's extent
+   */
+  public void queryEnvelope(long binId, Envelope envelope) {
+    long down = binId / numCols;
+    long over = binId % numCols;
+
+    double xmin = extentMin + (over * binSize);
+    double xmax = xmin + binSize;
+    double ymax = extentMax - (down * binSize);
+    double ymin = ymax - binSize;
+
+    envelope.setCoords(xmin, ymin, xmax, ymax);
+  }
+
+  /**
+   * Gets the envelope of the bin that contains the given x,y coordinates.
+   *
+   * @param x x-coordinate of the point
+   * @param y y-coordinate of the point
+   * @param envelope envelope that is updated in place with the bin's extent
+   */
+  public void queryEnvelope(double x, double y, Envelope envelope) {
+    double down = (extentMax - y) / binSize;
+    double over = (x - extentMin) / binSize;
+
+    double xmin = extentMin + (over * binSize);
+    double xmax = xmin + binSize;
+    double ymax = extentMax - (down * binSize);
+    double ymin = ymax - binSize;
+
+    envelope.setCoords(xmin, ymin, xmax, ymax);
+  }
+}
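
BinUtils maps an x,y coordinate onto a square grid centered on the origin and encodes the cell as one row-major long (row * numCols + column); ST_Bin and ST_BinEnvelope build on it. A round-trip sketch, not part of this commit (BinUtilsRoundTrip is a made-up name for illustration):

    import com.esri.core.geometry.Envelope;
    import org.apache.hadoop.hive.ql.udf.esri.BinUtils;

    public class BinUtilsRoundTrip {
      public static void main(String[] args) {
        BinUtils bins = new BinUtils(0.5);       // 0.5-unit square bins
        double x = 10.3, y = -4.7;
        long id = bins.getId(x, y);              // row-major bin id

        Envelope cell = new Envelope();
        bins.queryEnvelope(id, cell);            // extent of that bin, set in place
        boolean inside = x >= cell.getXMin() && x <= cell.getXMax()
            && y >= cell.getYMin() && y <= cell.getYMax();
        System.out.println("bin " + id + " contains the point: " + inside);  // true
      }
    }
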
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/GeometryUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/GeometryUtils.java
new file mode 100755
index 00000000000..a18785c9a35
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/GeometryUtils.java
@@ -0,0 +1,317 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.MapGeometry;
+import com.esri.core.geometry.OperatorImportFromESRIShape;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+public class GeometryUtils {
+
+  private static final int SIZE_WKID = 4;
+  private static final int SIZE_TYPE = 1;
+
+  public static final int WKID_UNKNOWN = 0;
+
+  public enum OGCType {
+    UNKNOWN(0),
+    ST_POINT(1),
+    ST_LINESTRING(2),
+    ST_POLYGON(3),
+    ST_MULTIPOINT(4),
+    ST_MULTILINESTRING(5),
+    ST_MULTIPOLYGON(6);
+
+    private final int index;
+
+    OGCType(int index) {
+      this.index = index;
+    }
+
+    public int getIndex() {
+      return this.index;
+    }
+  }
+
+  public static OGCType[] OGCTypeLookup =
+      { OGCType.UNKNOWN, OGCType.ST_POINT, OGCType.ST_LINESTRING, OGCType.ST_POLYGON, OGCType.ST_MULTIPOINT,
+          OGCType.ST_MULTILINESTRING, OGCType.ST_MULTIPOLYGON };
+
+  public static final WritableBinaryObjectInspector geometryTransportObjectInspector =
+      PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
+
+  private static final Cache<BytesWritable, OGCGeometry> geometryCache = CacheBuilder.newBuilder().weakKeys().build();
+
+  /**
+   * @param geomref1 reference to the first hive geometry bytes
+   * @param geomref2 reference to the second hive geometry bytes
+   * @return true if both geometries are in the same spatial reference
+   */
+  public static boolean compareSpatialReferences(BytesWritable geomref1, BytesWritable geomref2) {
+    return getWKID(geomref1) == getWKID(geomref2);
+  }
+
+  public static BytesWritable geometryToEsriShapeBytesWritable(MapGeometry mapGeometry) {
+    return serialize(mapGeometry);
+  }
+
+  public static BytesWritable geometryToEsriShapeBytesWritable(Geometry geometry, int wkid, OGCType type) {
+    return serialize(geometry, wkid, type);
+  }
+
+  public static BytesWritable geometryToEsriShapeBytesWritable(OGCGeometry geometry) {
+    return new CachedGeometryBytesWritable(geometry);
+  }
+
+  public static OGCGeometry geometryFromEsriShape(BytesWritable geomref) {
+    // always assume bytes are recycled and can't be cached by using
+    // geomref.getBytes() as the key
+    return geometryFromEsriShape(geomref, true);
+  }
+
+  public static OGCGeometry geometryFromEsriShape(BytesWritable geomref, boolean bytesRecycled) {
+
+    if (geomref == null) {
+      return null;
+    }
+
+    // this geomref might actually be a CachedGeometryBytesWritable which
+    // means we don't need to deserialize from bytes
+    if (geomref instanceof CachedGeometryBytesWritable) {
+      return ((CachedGeometryBytesWritable) geomref).getGeometry();
+    }
+
+    // if geomref bytes are recycled, we can't use the cache because every
+    // key in the cache will be the same byte array
+    if (!bytesRecycled) {
+      // check for a cache hit to previously created geometries
+      OGCGeometry cachedGeom = geometryCache.getIfPresent(geomref);
+
+      if (cachedGeom != null) {
+        return cachedGeom;
+      }
+    }
+
+    // not in cache or instance of CachedGeometryBytesWritable. now
+    // need to create the geometry from its bytes
+    int wkid = getWKID(geomref);
+    ByteBuffer shapeBuffer = getShapeByteBuffer(geomref);
+
+    //minimum for a shape, even an empty one, is the 4 byte type record
+    if (shapeBuffer.limit() < 4) {
+      return null;
+    } else {
+      if (shapeBuffer.getInt(0) == Geometry.Type.Unknown.value()) { //empty Geometry, intentional
+        return null;
+      } else {
+        SpatialReference spatialReference = null;
+        if (wkid != GeometryUtils.WKID_UNKNOWN) {
+          spatialReference = SpatialReference.create(wkid);
+        }
+
+        Geometry esriGeom = OperatorImportFromESRIShape.local().execute(0, Geometry.Type.Unknown, shapeBuffer);
+        OGCGeometry createdGeom = OGCGeometry.createFromEsriGeometry(esriGeom, spatialReference);
+
+        if (!bytesRecycled) {
+          // only add bytes to cache if we know they aren't being recycled
+          geometryCache.put(geomref, createdGeom);
+        }
+
+        return createdGeom;
+      }
+    }
+  }
+
+  /**
+   * Gets the geometry type for the given hive geometry bytes
+   *
+   * @param geomref reference to hive geometry bytes
+   * @return OGCType set in the 5th byte of the hive geometry bytes
+   */
+  public static OGCType getType(BytesWritable geomref) {
+    // SIZE_WKID is the offset to the byte that stores the type information
+    return OGCTypeLookup[geomref.getBytes()[SIZE_WKID]];
+  }
+
+  /**
+   * Sets the geometry type (in place) for the given hive geometry bytes
+   * @param geomref reference to hive geometry bytes
+   * @param type OGC geometry type
+   */
+  public static void setType(BytesWritable geomref, OGCType type) {
+    geomref.getBytes()[SIZE_WKID] = (byte) type.getIndex();
+  }
+
+  /**
+   * Gets the WKID for the given hive geometry bytes
+   *
+   * @param geomref reference to hive geometry bytes
+   * @return WKID set in the first 4 bytes of the hive geometry bytes
+   */
+  public static int getWKID(BytesWritable geomref) {
+    ByteBuffer bb = ByteBuffer.wrap(geomref.getBytes());
+    return bb.getInt(0);
+  }
+
+  /**
+   * Sets the WKID (in place) for the given hive geometry bytes
+   *
+   * @param geomref reference to hive geometry bytes
+   * @param wkid well-known spatial reference ID to store in the first 4 bytes
+   */
+  public static void setWKID(BytesWritable geomref, int wkid) {
+    ByteBuffer bb = ByteBuffer.allocate(4);
+    bb.putInt(wkid);
+    System.arraycopy(bb.array(), 0, geomref.getBytes(), 0, SIZE_WKID);
+  }
+
+  public static OGCType getInferredOGCType(Geometry geom) {
+    switch (geom.getType()) {
+    case Polygon:
+      return OGCType.ST_MULTIPOLYGON;
+    case Polyline:
+      return OGCType.ST_MULTILINESTRING;
+    case MultiPoint:
+      return OGCType.ST_MULTIPOINT;
+    case Point:
+      return OGCType.ST_POINT;
+    default:
+      return OGCType.UNKNOWN;
+    }
+  }
+
+  private static ByteBuffer getShapeByteBuffer(BytesWritable geomref) {
+    byte[] geomBytes = geomref.getBytes();
+    int offset = SIZE_WKID + SIZE_TYPE;
+
+    return ByteBuffer.wrap(geomBytes, offset, geomBytes.length - offset).slice().order(ByteOrder.LITTLE_ENDIAN);
+  }
+
+  private static BytesWritable serialize(MapGeometry mapGeometry) {
+    int wkid = 0;
+
+    SpatialReference spatialRef = mapGeometry.getSpatialReference();
+
+    if (spatialRef != null) {
+      wkid = spatialRef.getID();
+    }
+
+    Geometry.Type esriType = mapGeometry.getGeometry().getType();
+    OGCType ogcType;
+
+    switch (esriType) {
+    case Point:
+      ogcType = OGCType.ST_POINT;
+      break;
+    case Polyline:
+      ogcType = OGCType.ST_LINESTRING;
+      break;
+    case Polygon:
+      ogcType = OGCType.ST_POLYGON;
+      break;
+    default:
+      ogcType = OGCType.UNKNOWN;
+    }
+
+    return serialize(mapGeometry.getGeometry(), wkid, ogcType);
+  }
+
+  private static BytesWritable serialize(OGCGeometry ogcGeometry) {
+    int wkid;
+    try {
+      wkid = ogcGeometry.SRID();
+    } catch (NullPointerException npe) {
+      wkid = 0;
+    }
+
+    OGCType ogcType;
+    String typeName;
+    try {
+      typeName = ogcGeometry.geometryType();
+
+      if (typeName.equals("Point"))
+        ogcType = OGCType.ST_POINT;
+      else if (typeName.equals("LineString"))
+        ogcType = OGCType.ST_LINESTRING;
+      else if (typeName.equals("Polygon"))
+        ogcType = OGCType.ST_POLYGON;
+      else if (typeName.equals("MultiPoint"))
+        ogcType = OGCType.ST_MULTIPOINT;
+      else if (typeName.equals("MultiLineString"))
+        ogcType = OGCType.ST_MULTILINESTRING;
+      else if (typeName.equals("MultiPolygon"))
+        ogcType = OGCType.ST_MULTIPOLYGON;
+      else
+        ogcType = OGCType.UNKNOWN;
+    } catch (NullPointerException npe) {
+      ogcType = OGCType.UNKNOWN;
+    }
+
+    return serialize(ogcGeometry.getEsriGeometry(), wkid, ogcType);
+  }
+
+  private static BytesWritable serialize(Geometry geometry, int wkid, OGCType type) {
+    if (geometry == null) {
+      return null;
+    }
+
+    // first get shape buffer for geometry
+    byte[] shape = GeometryEngine.geometryToEsriShape(geometry);
+
+    if (shape == null) {
+      return null;
+    }
+
+    byte[] shapeWithData = new byte[shape.length + SIZE_WKID + SIZE_TYPE];
+
+    System.arraycopy(shape, 0, shapeWithData, SIZE_WKID + SIZE_TYPE, shape.length);
+
+    BytesWritable hiveGeometryBytes = new BytesWritable(shapeWithData);
+
+    setWKID(hiveGeometryBytes, wkid);
+    setType(hiveGeometryBytes, type);
+
+    BytesWritable ret = new BytesWritable(shapeWithData);
+
+    return ret;
+  }
+
+  public static class CachedGeometryBytesWritable extends BytesWritable {
+    OGCGeometry cachedGeom;
+
+    public CachedGeometryBytesWritable(OGCGeometry geom) {
+      cachedGeom = geom;
+      super.set(serialize(cachedGeom));
+    }
+
+    public OGCGeometry getGeometry() {
+      return cachedGeom;
+    }
+  }
+}
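
GeometryUtils defines the transport form shared by all of these UDFs: a BytesWritable whose first 4 bytes hold the WKID, whose 5th byte holds the OGCType ordinal, and whose remaining bytes hold the ESRI shape buffer, with a weak-key cache plus CachedGeometryBytesWritable to skip re-deserialization. A round-trip sketch through the public helpers, not part of this commit (GeometrySerdeRoundTrip is a made-up name):

    import com.esri.core.geometry.SpatialReference;
    import com.esri.core.geometry.ogc.OGCGeometry;
    import org.apache.hadoop.hive.ql.udf.esri.GeometryUtils;
    import org.apache.hadoop.io.BytesWritable;

    public class GeometrySerdeRoundTrip {
      public static void main(String[] args) {
        OGCGeometry point = OGCGeometry.fromText("POINT (1 2)");
        point.setSpatialReference(SpatialReference.create(4326));   // WGS84

        // layout: [4-byte WKID][1-byte OGC type][ESRI shape buffer]
        BytesWritable ref = GeometryUtils.geometryToEsriShapeBytesWritable(point);

        System.out.println(GeometryUtils.getWKID(ref));                          // 4326
        System.out.println(GeometryUtils.getType(ref));                          // ST_POINT
        System.out.println(GeometryUtils.geometryFromEsriShape(ref).asText());   // POINT (1 2)
      }
    }
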
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/HiveGeometry.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/HiveGeometry.java
new file mode 100644
index 00000000000..4ff1062e21e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/HiveGeometry.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+public class HiveGeometry {
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/HiveGeometryOIHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/HiveGeometryOIHelper.java
new file mode 100755
index 00000000000..c9f49250a66
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/HiveGeometryOIHelper.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.log4j.Logger;
+
+public class HiveGeometryOIHelper {
+
+  static Logger LOG = Logger.getLogger(HiveGeometryOIHelper.class);
+
+  private final PrimitiveObjectInspector oi;
+  private final int argIndex;
+  private final boolean isConstant;
+
+  OGCGeometry constantGeometry;
+
+  private HiveGeometryOIHelper(ObjectInspector oi, int argIndex) {
+    this.oi = (PrimitiveObjectInspector) oi;
+    this.argIndex = argIndex;
+
+    // constant geometries only need to be processed once and can
+    // be optimized in certain operations
+    isConstant = ObjectInspectorUtils.isConstantObjectInspector(oi);
+  }
+
+  public static HiveGeometryOIHelper create(ObjectInspector[] OIs, int argIndex) throws UDFArgumentException {
+    return create(OIs[argIndex], argIndex);
+  }
+
+  public static HiveGeometryOIHelper create(ObjectInspector oi, int argIndex) throws UDFArgumentException {
+    if (oi.getCategory() != Category.PRIMITIVE) {
+      throw new UDFArgumentException("Geometry argument must be a primitive type");
+    }
+
+    return new HiveGeometryOIHelper(oi, argIndex);
+  }
+
+  public static boolean canCreate(ObjectInspector oi) {
+    return oi.getCategory() == Category.PRIMITIVE;
+  }
+
+  /**
+   * Gets whether this geometry argument is constant.
+   *
+   * @return true if the argument is backed by a constant object inspector
+   */
+  public boolean isConstant() {
+    return isConstant;
+  }
+
+  /**
+   * Returns the cached constant geometry object.
+   *
+   * @return cached geometry, or null if the argument is not constant
+   */
+  public OGCGeometry getConstantGeometry() {
+    return constantGeometry;
+  }
+
+  /**
+   * Reads the corresponding geometry from the deferred object list
+   * or returns the cached geometry if the argument is constant.
+   *
+   * @param args deferred UDF arguments for the current row
+   * @return OGCPoint or null if not a point
+   * @see #getGeometry(DeferredObject[])
+   */
+  public OGCPoint getPoint(DeferredObject[] args) {
+    OGCGeometry geometry = getGeometry(args);
+
+    if (geometry instanceof OGCPoint) {
+      return (OGCPoint) geometry;
+    } else {
+      return null;
+    }
+  }
+
+  /**
+   * Reads the corresponding geometry from the deferred object list
+   * or returns the cached geometry if the argument is constant.
+   *
+   * @param args deferred UDF arguments for the current row
+   * @return geometry for the current row, or null if it cannot be read
+   */
+  public OGCGeometry getGeometry(DeferredObject[] args) {
+    if (isConstant) {
+      if (constantGeometry == null) {
+        constantGeometry = getGeometry(args[argIndex]);
+      }
+
+      return constantGeometry;
+    } else {
+      // not constant, so we have to rebuild the geometry
+      // on every call
+      return getGeometry(args[argIndex]);
+    }
+  }
+
+  private OGCGeometry getGeometry(DeferredObject arg) {
+    Object writable;
+    try {
+      writable = oi.getPrimitiveWritableObject(arg.get());
+    } catch (HiveException e) {
+      LOG.error("Failed to get writable", e);
+      return null;
+    }
+
+    if (writable == null) {
+      return null;
+    }
+
+    switch (oi.getPrimitiveCategory()) {
+    case BINARY:
+      return getGeometryFromBytes((BytesWritable) writable);
+    case STRING:
+      return OGCGeometry.fromText(writable.toString());
+    default:
+      return null;
+    }
+  }
+
+  private BytesWritable last = null;
+
+  // always assume bytes are reused until we determine they aren't
+  private boolean bytesReused = true;
+
+  private OGCGeometry getGeometryFromBytes(BytesWritable writable) {
+
+    if (bytesReused) {
+      if (last != null && last != writable) {
+        // this assumes that the source of these bytes will either always
+        // reuse the bytes or never reuse the bytes.
+        bytesReused = false;
+      }
+      last = writable;
+    }
+
+    return GeometryUtils.geometryFromEsriShape(writable, bytesReused);
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder builder = new StringBuilder();
+
+    builder.append("HiveGeometryHelper(");
+    builder.append("constant=" + isConstant + ";");
+    builder.append(")");
+
+    return builder.toString();
+  }
+}
\ No newline at end of file
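
HiveGeometryOIHelper lets a GenericUDF accept either BINARY (serialized shape) or STRING (WKT) geometry arguments, caching the decoded geometry when the argument is constant and otherwise using the byte-reuse-aware path above. A sketch of the call pattern, not part of this commit; ST_ExampleWkt is a hypothetical UDF written only for illustration:

    import com.esri.core.geometry.ogc.OGCGeometry;
    import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.esri.HiveGeometryOIHelper;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.Text;

    public class ST_ExampleWkt extends GenericUDF {
      private HiveGeometryOIHelper geomHelper;

      @Override
      public ObjectInspector initialize(ObjectInspector[] OIs) throws UDFArgumentException {
        geomHelper = HiveGeometryOIHelper.create(OIs, 0);   // wrap argument 0
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
      }

      @Override
      public Object evaluate(DeferredObject[] args) throws HiveException {
        OGCGeometry geom = geomHelper.getGeometry(args);    // cached if the argument is constant
        return geom == null ? null : new Text(geom.asText());
      }

      @Override
      public String getDisplayString(String[] children) {
        return getStandardDisplayString("st_examplewkt", children);
      }
    }
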
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/LogUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/LogUtils.java
new file mode 100644
index 00000000000..5dfe783b861
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/LogUtils.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+
+public class LogUtils {
+
+  private static final int MSG_SRID_MISMATCH = 0;
+  private static final int MSG_ARGUMENTS_NULL = 1;
+  private static final int MSG_ARGUMENT_LENGTH_XY = 2;
+  private static final int MSG_MULTI_ARGUMENT_LENGTH_XY = 3;
+  private static final int MSG_INVALID_TYPE = 4;
+  private static final int MSG_INVALID_TEXT = 5;
+  private static final int MSG_INVALID_INDEX = 6;
+  private static final int MSG_INTERNAL_ERROR = 7;
+  private static final int MSG_ARGUMENT_LENGTH = 8;
+  private static final int MSG_EXCEPTION_THROWN = 9;
+  private static final int MSG_NOT_3D = 10;
+  private static final int MSG_NOT_MEASURED = 11;
+
+  private static final String[] messages =
+      { "Mismatched spatial references ('%d' <> '%d')", "Invalid arguments - one or more arguments are null.",
+          "Invalid arguments.  Expecting one or more x,y pairs.",
+          "Invalid arguments.  Expecting one or more x,y pairs in array argument %d.",
+          "Invalid geometry type.  Expecting %s but found %s", "Invalid arguments.  Ill-formed text: %s ....",
+          "Invalid index.  Expected range [%d, %d], actual index %d.", "Internal error - %s.",
+          "Invalid arguments.  Expecting one or more arguments.", "Exception thrown by %s", "Invalid argument - not 3D",
+          "Invalid argument - not measured" };
+
+  /**
+   * Log when comparing geometries in different spatial references
+   *
+   * @param logger logger of the calling UDF
+   * @param geomref1 reference to the first hive geometry bytes
+   * @param geomref2 reference to the second hive geometry bytes
+   */
+  public static void Log_SRIDMismatch(Logger logger, BytesWritable geomref1, BytesWritable geomref2) {
+    logger.error(
+        String.format(messages[MSG_SRID_MISMATCH], GeometryUtils.getWKID(geomref1), GeometryUtils.getWKID(geomref2)));
+  }
+
+  public static void Log_SRIDMismatch(Logger logger, BytesWritable geomref1, int wkid2) {
+    logger.error(String.format(messages[MSG_SRID_MISMATCH], GeometryUtils.getWKID(geomref1), wkid2));
+  }
+
+  /**
+   * Log when arguments passed to evaluate are null
+   *
+   * @param logger logger of the calling UDF
+   */
+  public static void Log_ArgumentsNull(Logger logger) {
+    logger.error(messages[MSG_ARGUMENTS_NULL]);
+  }
+
+  public static void Log_VariableArgumentLengthXY(Logger logger) {
+    logger.error(messages[MSG_ARGUMENT_LENGTH_XY]);
+  }
+
+  public static void Log_VariableArgumentLengthXY(Logger logger, int array_argument_index) {
+    logger.error(String.format(messages[MSG_MULTI_ARGUMENT_LENGTH_XY], array_argument_index));
+  }
+
+  public static void Log_InvalidType(Logger logger, GeometryUtils.OGCType expecting, GeometryUtils.OGCType actual) {
+    logger.error(String.format(messages[MSG_INVALID_TYPE], expecting, actual));
+  }
+
+  public static void Log_InvalidText(Logger logger, String text) {
+    int limit = text.length();
+    limit = limit > 80 ? 80 : limit;
+    logger.error(String.format(messages[MSG_INVALID_TEXT], text.substring(0, limit)));
+  }
+
+  public static void Log_InvalidIndex(Logger logger, int actual, int expMin, int expMax) {
+    logger.error(String.format(messages[MSG_INVALID_INDEX], expMin, expMax, actual));
+  }
+
+  public static void Log_InternalError(Logger logger, String text) {
+    logger.error(String.format(messages[MSG_INTERNAL_ERROR], text));
+  }
+
+  public static void Log_VariableArgumentLength(Logger logger) {
+    logger.error(messages[MSG_ARGUMENT_LENGTH]);
+  }
+
+  public static void Log_ExceptionThrown(Logger logger, String method, Exception e) {
+    logger.error(String.format(messages[MSG_EXCEPTION_THROWN], method), e);
+  }
+
+  public static void Log_Not3D(Logger logger) {
+    logger.error(messages[MSG_NOT_3D]);
+  }
+
+  public static void Log_NotMeasured(Logger logger) {
+    logger.error(messages[MSG_NOT_MEASURED]);
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_ConvexHull.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_ConvexHull.java
new file mode 100644
index 00000000000..fcad441604a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_ConvexHull.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDAF;
+import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collections;
+
+@Description(name = "ST_Aggr_ConvexHull",
+    value = "_FUNC_(ST_Geometry) - aggregate convex hull of all geometries passed",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(geometry) FROM source; -- return convex hull of all geometries in source")
+
+public class ST_Aggr_ConvexHull extends UDAF {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Aggr_ConvexHull.class.getName());
+
+  public static class AggrConvexHullBinaryEvaluator implements UDAFEvaluator {
+
+    private final int MAX_BUFFER_SIZE = 1000;
+    private final ArrayList<Geometry> geometries = new ArrayList<Geometry>(MAX_BUFFER_SIZE);
+    SpatialReference spatialRef = null;
+    int firstWKID = -2;
+
+    /*
+     * Initialize evaluator
+     */
+    @Override
+    public void init() {
+
+      if (geometries.size() > 0) {
+        geometries.clear();
+      }
+    }
+
+    /*
+     * Iterate is called once per row in a table
+     */
+    public boolean iterate(BytesWritable geomref) throws HiveException {
+
+      if (geomref == null) {
+        LogUtils.Log_ArgumentsNull(LOG);
+        return false;
+      }
+
+      if (firstWKID == -2) {
+        firstWKID = GeometryUtils.getWKID(geomref);
+        if (firstWKID != GeometryUtils.WKID_UNKNOWN) {
+          spatialRef = SpatialReference.create(firstWKID);
+        }
+      } else if (firstWKID != GeometryUtils.getWKID(geomref)) {
+        LogUtils.Log_SRIDMismatch(LOG, geomref, firstWKID);
+        return false;
+      }
+
+      addGeometryToBuffer(geomref);
+
+      return (geometries.size() != 0);
+    }
+
+    /*
+     * Merge the current state of this evaluator with the result of another evaluator's terminatePartial()
+     */
+    public boolean merge(BytesWritable other) throws HiveException {
+      // for our purposes, merge is the same as iterate
+      return iterate(other);
+    }
+
+    public BytesWritable terminatePartial() throws HiveException {
+      maybeAggregateBuffer(true);
+      if (geometries.size() == 1) {
+        OGCGeometry rslt = OGCGeometry.createFromEsriGeometry(geometries.get(0), spatialRef);
+        return GeometryUtils.geometryToEsriShapeBytesWritable(rslt);
+      } else {
+        return null;
+      }
+    }
+
+    /*
+     * Return a geometry that is the aggregation of all geometries added up until this point
+     */
+    public BytesWritable terminate() throws HiveException {
+      // for our purposes, terminate is the same as terminatePartial
+      return terminatePartial();
+    }
+
+    private void addGeometryToBuffer(BytesWritable geomref) throws HiveException {
+      OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+      addGeometryToBuffer(ogcGeometry.getEsriGeometry());
+    }
+
+    private void addGeometryToBuffer(Geometry geom) throws HiveException {
+      geometries.add(geom);
+      maybeAggregateBuffer(false);
+    }
+
+    /*
+     * If the right conditions are met (or force == true), create a convex hull of the geometries
+     * in the current buffer
+     */
+    private void maybeAggregateBuffer(boolean force) throws HiveException {
+
+      if (force || geometries.size() > MAX_BUFFER_SIZE) {
+        Geometry[] geomArray = new Geometry[geometries.size()];
+        geometries.toArray(geomArray);
+        geometries.clear();
+
+        try {
+          //LOG.trace("performing convexHull");
+          Geometry[] convexResult = GeometryEngine.convexHull(geomArray, true);
+          Collections.addAll(geometries, convexResult);  // expect one
+        } catch (Exception e) {
+          LOG.error("exception thrown", e);
+        }
+      }
+    }
+
+  }
+}
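
ST_Aggr_ConvexHull follows the old-style UDAF evaluator lifecycle (init, iterate once per row, merge partials, terminate) and buffers up to MAX_BUFFER_SIZE geometries before collapsing them with GeometryEngine.convexHull. A sketch that drives the evaluator by hand, the way a unit test might, not part of this commit (AggrConvexHullDemo is a made-up name):

    import com.esri.core.geometry.ogc.OGCGeometry;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.esri.GeometryUtils;
    import org.apache.hadoop.hive.ql.udf.esri.ST_Aggr_ConvexHull;
    import org.apache.hadoop.io.BytesWritable;

    public class AggrConvexHullDemo {
      public static void main(String[] args) throws HiveException {
        ST_Aggr_ConvexHull.AggrConvexHullBinaryEvaluator eval =
            new ST_Aggr_ConvexHull.AggrConvexHullBinaryEvaluator();
        eval.init();

        // One call per "row", exactly as Hive would invoke iterate().
        for (String wkt : new String[] { "POINT (0 0)", "POINT (4 0)", "POINT (0 3)" }) {
          BytesWritable row =
              GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.fromText(wkt));
          eval.iterate(row);
        }

        BytesWritable hull = eval.terminate();   // convex hull of everything seen so far
        System.out.println(GeometryUtils.geometryFromEsriShape(hull).asText());
      }
    }
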
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_Intersection.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_Intersection.java
new file mode 100644
index 00000000000..28b18d0bce8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_Intersection.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDAF;
+import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Aggr_Intersection",
+    value = "_FUNC_(ST_Geometry) - aggregate intersection of all geometries passed",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(geometry) FROM source; -- return intersection of all geometries in source")
+
+public class ST_Aggr_Intersection extends UDAF {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Aggr_Intersection.class.getName());
+
+  public static class AggrIntersectionBinaryEvaluator implements UDAFEvaluator {
+
+    private OGCGeometry isectGeom = null;
+    SpatialReference spatialRef = null;
+    int firstWKID = -2;
+
+    /*
+     * Initialize evaluator
+     */
+    @Override
+    public void init() {  // no-op
+    }
+
+    /*
+     * Iterate is called once per row in a table
+     */
+    public boolean iterate(BytesWritable geomref) throws HiveException {
+
+      if (geomref == null) {
+        LogUtils.Log_ArgumentsNull(LOG);
+        return false;
+      }
+
+      if (firstWKID == -2) {
+        firstWKID = GeometryUtils.getWKID(geomref);
+        if (firstWKID != GeometryUtils.WKID_UNKNOWN) {
+          spatialRef = SpatialReference.create(firstWKID);
+        }
+      } else if (firstWKID != GeometryUtils.getWKID(geomref)) {
+        LogUtils.Log_SRIDMismatch(LOG, geomref, firstWKID);
+        return false;
+      }
+
+      try {
+        OGCGeometry rowGeom = GeometryUtils.geometryFromEsriShape(geomref);
+        rowGeom.setSpatialReference(spatialRef);
+        if (isectGeom == null)
+          isectGeom = rowGeom;
+        else
+          isectGeom = isectGeom.intersection(rowGeom);
+        return true;
+      } catch (Exception e) {
+        LogUtils.Log_InternalError(LOG, "ST_Aggr_Intersection: " + e);
+        return false;
+      }
+
+    }
+
+    /*
+     * Merge the current state of this evaluator with the result of another evaluator's terminatePartial()
+     */
+    public boolean merge(BytesWritable other) throws HiveException {
+      // for our purposes, merge is the same as iterate
+      return iterate(other);
+    }
+
+    /*
+     * Return a geometry that is the intersection of all geometries added up until this point
+     */
+    public BytesWritable terminatePartial() throws HiveException {
+      if (isectGeom == null) {
+        return null;
+      } else {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(isectGeom);
+      }
+    }
+
+    public BytesWritable terminate() throws HiveException {
+      // for our purposes, terminate is the same as terminatePartial
+      return terminatePartial();
+    }
+
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_Union.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_Union.java
new file mode 100644
index 00000000000..3824a51002c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Aggr_Union.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryCursor;
+import com.esri.core.geometry.ListeningGeometryCursor;
+import com.esri.core.geometry.OperatorUnion;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDAF;
+import org.apache.hadoop.hive.ql.exec.UDAFEvaluator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Aggr_Union",
+    value = "_FUNC_(ST_Geometry) - aggregate union of all geometries passed",
+    extended = "Example:\n" + "  SELECT _FUNC_(geometry) FROM source; -- return union of all geometries in source")
+
+public class ST_Aggr_Union extends UDAF {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Aggr_Union.class.getName());
+
+  public static class AggrUnionBinaryEvaluator implements UDAFEvaluator {
+
+    SpatialReference spatialRef = null;
+    int firstWKID = -2;
+    ListeningGeometryCursor lgc = null;  // listening geometry cursor
+    GeometryCursor xgc = null;           // executing geometry cursor
+
+    /*
+     * Initialize evaluator
+     */
+    @Override
+    public void init() {  // no-op
+    }
+
+    /*
+     * Iterate is called once per row in a table
+     */
+    public boolean iterate(BytesWritable geomref) throws HiveException {
+
+      if (geomref == null) {
+        LogUtils.Log_ArgumentsNull(LOG);
+        return false;
+      }
+
+      if (xgc == null) {
+        firstWKID = GeometryUtils.getWKID(geomref);
+        if (firstWKID != GeometryUtils.WKID_UNKNOWN) {
+          spatialRef = SpatialReference.create(firstWKID);
+        }
+        // Need new geometry cursors both initially and after every terminatePartial(),
+        // because the geometry cursors can not be re-used after extracting the
+        // unioned geometry with GeometryCursor.next().
+        //Create an empty listener.
+        lgc = new ListeningGeometryCursor();
+        //Obtain union operator - after taking note of spatial reference.
+        xgc = OperatorUnion.local().execute(lgc, spatialRef, null);
+      } else if (firstWKID != GeometryUtils.getWKID(geomref)) {
+        LogUtils.Log_SRIDMismatch(LOG, geomref, firstWKID);
+        return false;
+      }
+
+      try {
+        lgc.tick(GeometryUtils.geometryFromEsriShape(geomref).getEsriGeometry());   // push
+        xgc.tock();   // tock to match tick
+        return true;
+      } catch (Exception e) {
+        LogUtils.Log_InternalError(LOG, "ST_Aggr_Union: " + e);
+        return false;
+      }
+
+    }
+
+    /*
+     * Merge the current state of this evaluator with the result of another evaluator's terminatePartial()
+     */
+    public boolean merge(BytesWritable other) throws HiveException {
+      // for our purposes, merge is the same as iterate
+      return iterate(other);
+    }
+
+    /*
+     * Return a geometry that is the union of all geometries added up until this point
+     */
+    public BytesWritable terminatePartial() throws HiveException {
+      try {
+        Geometry rslt = xgc.next();
+        lgc = null;  // not reusable
+        xgc = null;  // not reusable
+        OGCGeometry ogeom = OGCGeometry.createFromEsriGeometry(rslt, spatialRef);
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogeom);
+      } catch (Exception e) {
+        LogUtils.Log_InternalError(LOG, "ST_Aggr_Union: " + e);
+      }
+      return null;
+    }
+
+    /*
+     * Return a geometry that is the union of all geometries added up until this point
+     */
+    public BytesWritable terminate() throws HiveException {
+      // for our purposes, terminate is the same as terminatePartial
+      return terminatePartial();
+    }
+
+  }
+}
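ST_Aggr_Union streams each incoming geometry through the tick/tock cursor pair described in the comments above, so the union is built incrementally rather than by materializing all inputs. A usage sketch, assuming a hypothetical parcels(county STRING, shape BINARY) table; this is effectively a dissolve-by-attribute:

    SELECT county, ST_Aggr_Union(shape) AS county_shape
    FROM parcels
    GROUP BY county;   -- one merged geometry per county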
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Area.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Area.java
new file mode 100755
index 00000000000..1c35c2f277a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Area.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Area",
+    value = "_FUNC_(ST_Polygon) - returns the area of polygon or multipolygon",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,4, 4,1)) FROM src LIMIT 1;  --  9.0")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Area(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow",
+//			result = "9.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Area(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')) from onerow",
+//			result = "24.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Area(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow",
+//			result = "2.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Area(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_Area extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_Area.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    resultDouble.set(ogcGeometry.getEsriGeometry().calculateArea2D());
+    return resultDouble;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsBinary.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsBinary.java
new file mode 100755
index 00000000000..ec74196e768
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsBinary.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+@Description(name = "ST_AsBinary",
+    value = "_FUNC_(ST_Geometry) - return Well-Known Binary (WKB) representation of geometry\n",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Point(1, 2)) FROM onerow; -- WKB representation of POINT (1 2)\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromWKB(ST_AsBinary(ST_GeomFromText('linestring (10 40, 40 30)')))) from onerow",
+//			result = "ST_LINESTRING"
+//			)
+//		}
+//	)
+
+public class ST_AsBinary extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_AsBinary.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      ByteBuffer byteBuf = ogcGeometry.asBinary();
+      byte[] byteArr = byteBuf.array();
+      return new BytesWritable(byteArr);
+    } catch (Exception e) {
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsGeoJson.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsGeoJson.java
new file mode 100755
index 00000000000..88d34e18f7f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsGeoJson.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_AsGeoJSON",
+    value = "_FUNC_(geometry) - return GeoJSON representation of geometry\n",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_Point(1.0, 2.0)) from onerow; -- {\"type\":\"Point\", \"coordinates\":[1.0, 2.0]}\n"
+        + "Note : \n" + " ST_AsGeoJSON outputs the _geometry_ contents but not _crs_.\n"
+        + " ST_AsGeoJSON requires geometry-api-java version 1.1 or later.\n")
+//@HivePdkUnitTests(
+//	cases = { 
+//		@HivePdkUnitTest(
+//			query = "select ST_AsGeoJSON(ST_point(1, 2))) from onerow",
+//			result = "{\"type\":\"Point\", \"coordinates\":[1.0, 2.0]}"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsGeoJSON(ST_MultiLineString(array(1, 1, 2, 2, 3, 3), array(7,7, 8,8, 9,9))) from onerow",
+//			result = "{\"type\":\"MultiLineString\",\"coordinates\":[[[1.0,1.0],[2.0,2.0],[3.0,3.0]],[[7.0,7.0],[8.0,8.0],[9.0,9.0]]]}"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsGeoJSON(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow",
+//			result = "{\"type\":\"Polygon\",\"coordinates\":[[[1.0,1.0],[1.0,4.0],[4.0,4.0],[4.0,1.0],[1.0,1.0]]]}"
+//			)
+//		}
+//	)
+
+public class ST_AsGeoJson extends ST_Geometry {
+  final Text resultText = new Text();
+  static final Logger LOG = LoggerFactory.getLogger(ST_AsGeoJson.class.getName());
+
+  public Text evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      String outJson = ogcGeometry.asGeoJson();
+      resultText.set(outJson);
+      return resultText;
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_AsGeoJSON: " + e);
+      return null;
+    }
+  }
+
+}
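As the note above says, ST_AsGeoJSON emits only the geometry contents, so an SRID attached with ST_SetSRID is not expected to show up as a crs member (the exact output may vary with the geometry-api-java version). A hedged sketch:

    SELECT ST_AsGeoJSON(ST_SetSRID(ST_Point(1.0, 2.0), 4326)) FROM onerow;
    -- {"type":"Point","coordinates":[1.0,2.0]}   (no "crs" member expected)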
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsJson.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsJson.java
new file mode 100755
index 00000000000..6ae59857432
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsJson.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_AsJSON",
+    value = "_FUNC_(ST_Geometry) - return JSON representation of ST_Geometry\n",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Point(1.0, 2.0)) from onerow; -- {\"x\":1.0,\"y\":2.0}\n"
+        + "  SELECT _FUNC_(ST_SetSRID(ST_Point(1, 1), 4326)) from onerow; -- {\"x\":1.0,\"y\":1.0,\"spatialReference\":{\"wkid\":4326}}")
+//@HivePdkUnitTests(
+//	cases = { 
+//		@HivePdkUnitTest(
+//			query = "select ST_AsJSON(ST_Point(1, 2)), ST_AsJSON(ST_SetSRID(ST_Point(1, 1), 4326)) from onerow",
+//			result = "{\"x\":1.0,\"y\":2.0}	{\"x\":1.0,\"y\":1.0,\"spatialReference\":{\"wkid\":4326}}"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsJSON(ST_MultiLineString(array(1, 1, 2, 2, 3, 3), array(10, 10, 11, 11, 12, 12))) from onerow",
+//			result = "{\"paths\":[[[1.0,1.0],[2.0,2.0],[3.0,3.0]],[[10.0,10.0],[11.0,11.0],[12.0,12.0]]]}"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsJSON(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)), ST_AsJSON(ST_Polygon(1, 1)) from onerow",
+//			result = "{\"rings\":[[[1.0,1.0],[1.0,4.0],[4.0,4.0],[4.0,1.0],[1.0,1.0]]]}	NULL"
+//			)
+//		}
+//	)
+public class ST_AsJson extends ST_Geometry {
+  static final Logger LOG = LoggerFactory.getLogger(ST_AsJson.class.getName());
+
+  public Text evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Geometry esriGeom = ogcGeometry.getEsriGeometry();
+    int wkid = GeometryUtils.getWKID(geomref);
+    return new Text(GeometryEngine.geometryToJson(wkid, esriGeom));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsShape.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsShape.java
new file mode 100644
index 00000000000..ece4f69db7a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsShape.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_AsShape",
+    value = "_FUNC_(ST_Geometry) - return Esri shape representation of geometry\n",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_Point(1, 2)) FROM onerow; -- Esri shape representation of POINT (1 2)\n")
+public class ST_AsShape extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_AsShape.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      // Get Esri shape representation
+      Geometry esriGeometry = ogcGeometry.getEsriGeometry();
+      byte[] esriShape = GeometryEngine.geometryToEsriShape(esriGeometry);
+      return new BytesWritable(esriShape);
+    } catch (Exception e) {
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+}
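ST_AsShape is the counterpart of ST_GeomFromShape (added elsewhere in this patch), so the two should round-trip a geometry through the Esri shape format. A hedged sketch, assuming ST_GeometryType from the same UDF set:

    SELECT ST_GeometryType(ST_GeomFromShape(ST_AsShape(ST_Point(1, 2)))) FROM onerow;  -- ST_POINT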
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsText.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsText.java
new file mode 100755
index 00000000000..84be547a74e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_AsText.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.WktExportFlags;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.udf.esri.GeometryUtils.OGCType;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_AsText",
+    value = "_FUNC_(ST_Geometry) - return Well-Known Text (WKT) representation of ST_Geometry\n",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Point(1, 2)) FROM onerow;  --  POINT (1 2)\n")
+//@HivePdkUnitTests(
+//	cases = { 
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsText(ST_Point(1, 2)), ST_AsText(ST_MultiPoint(1, 2, 3, 4)) FROM onerow",
+//			result = "POINT (1 2)	MULTIPOINT ((1 2), (3 4))"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsText(ST_LineString(1, 1, 2, 2, 3, 3)) FROM onerow",
+//			result = "LINESTRING (1 1, 2 2, 3 3)"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsText(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)), ST_AsText(ST_Polygon(1, 1, 4, 1, 4, 4, 1, 4)) FROM onerow",
+//			result = "POLYGON ((4 1, 4 4, 1 4, 1 1, 4 1))	NULL"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsText(ST_MultiPolygon(array(1, 1, 1, 4, 4, 4, 4, 1), array(11, 11, 11, 14, 14, 14, 14, 11))) FROM onerow",
+//			result = "MULTIPOLYGON (((4 1, 4 4, 1 4, 1 1, 4 1)), ((14 11, 14 14, 11 14, 11 11, 14 11)))"
+//			)
+//		}
+//	)
+public class ST_AsText extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_AsText.class.getName());
+
+  public Text evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    int wktExportFlag = getWktExportFlag(GeometryUtils.getType(geomref));
+
+    try {
+      // Note: ogcGeometry.asText() can lose the MULTI~ type for a single-part geometry,
+      // return new Text(ogcGeometry.asText());
+      return new Text(GeometryEngine.geometryToWkt(ogcGeometry.getEsriGeometry(), wktExportFlag));
+    } catch (Exception e) {
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+
+  private int getWktExportFlag(OGCType type) {
+    switch (type) {
+    case ST_POLYGON:
+      return WktExportFlags.wktExportPolygon;
+    case ST_MULTIPOLYGON:
+      return WktExportFlags.wktExportMultiPolygon;
+    case ST_POINT:
+      return WktExportFlags.wktExportPoint;
+    case ST_MULTIPOINT:
+      return WktExportFlags.wktExportMultiPoint;
+    case ST_LINESTRING:
+      return WktExportFlags.wktExportLineString;
+    case ST_MULTILINESTRING:
+      return WktExportFlags.wktExportMultiLineString;
+    default:
+      return WktExportFlags.wktExportDefaults;
+    }
+  }
+}
\ No newline at end of file
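The export flag chosen in getWktExportFlag keeps the OGC type recorded with the shape, which a plain ogcGeometry.asText() could lose for a single-part multi geometry (see the in-code note). A hedged illustration of the intended behavior:

    SELECT ST_AsText(ST_GeomFromText('MULTILINESTRING ((0 80, 0.03 80.04))')) FROM onerow;
    -- expected to remain MULTILINESTRING ((0 80, 0.03 80.04)) rather than degrade to LINESTRING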
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Bin.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Bin.java
new file mode 100755
index 00000000000..61e5985b0b2
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Bin.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+
+import java.util.EnumSet;
+
+@Description(name = "ST_Bin",
+    value = "_FUNC_(binsize, point) - return bin ID for given point\n")
+public class ST_Bin extends GenericUDF {
+
+  private transient HiveGeometryOIHelper geomHelper;
+  private transient boolean binSizeIsConstant;
+  private transient PrimitiveObjectInspector oiBinSize;
+  private transient BinUtils bins;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] OIs) throws UDFArgumentException {
+
+    if (OIs.length != 2) {
+      throw new UDFArgumentException("Function takes exactly 2 arguments");
+    }
+
+    if (OIs[0].getCategory() != Category.PRIMITIVE) {
+      throw new UDFArgumentException("Argument 0 must be a number - got: " + OIs[0].getCategory());
+    }
+
+    oiBinSize = (PrimitiveObjectInspector) OIs[0];
+    if (!EnumSet.of(PrimitiveCategory.DECIMAL, PrimitiveCategory.DOUBLE, PrimitiveCategory.INT, PrimitiveCategory.LONG,
+        PrimitiveCategory.SHORT, PrimitiveCategory.FLOAT).contains(oiBinSize.getPrimitiveCategory())) {
+      throw new UDFArgumentException("Argument 0 must be a number - got: " + oiBinSize.getPrimitiveCategory());
+    }
+
+    geomHelper = HiveGeometryOIHelper.create(OIs[1], 1);
+    binSizeIsConstant = ObjectInspectorUtils.isConstantObjectInspector(OIs[0]);
+
+    return PrimitiveObjectInspectorFactory.javaLongObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] args) throws HiveException {
+    double binSize = PrimitiveObjectInspectorUtils.getDouble(args[0].get(), oiBinSize);
+
+    if (!binSizeIsConstant || bins == null) {
+      bins = new BinUtils(binSize);
+    }
+
+    OGCPoint point = geomHelper.getPoint(args);
+
+    if (point == null) {
+      return null;
+    }
+
+    return bins.getId(point.X(), point.Y());
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    assert (args.length == 2);
+    return String.format("st_bin(%s,%s)", args[0], args[1]);
+  }
+
+}
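ST_Bin assigns a point to a square cell of the given size and returns the cell ID, so a spatial histogram is just a GROUP BY. A usage sketch with a hypothetical taxi_pickups(lon DOUBLE, lat DOUBLE) table and 0.01-degree cells:

    SELECT ST_Bin(0.01, ST_Point(lon, lat)) AS cell_id, COUNT(*) AS pickups
    FROM taxi_pickups
    GROUP BY ST_Bin(0.01, ST_Point(lon, lat));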
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_BinEnvelope.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_BinEnvelope.java
new file mode 100755
index 00000000000..7bab0b2f333
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_BinEnvelope.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Envelope;
+import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.hadoop.hive.ql.udf.esri.GeometryUtils.OGCType;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+
+import java.util.EnumSet;
+
+@Description(name = "ST_BinEnvelope",
+    value = "_FUNC_(binsize, point) - return bin envelope for given point\n"
+        + "_FUNC_(binsize, binid) - return bin envelope for given bin ID\n")
+public class ST_BinEnvelope extends GenericUDF {
+  private transient boolean binSizeIsConstant;
+  private transient PrimitiveObjectInspector oiBinSize;
+  private transient BinUtils bins;
+
+  private transient PrimitiveObjectInspector oiBinId;
+  private transient HiveGeometryOIHelper binPoint;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] OIs) throws UDFArgumentException {
+
+    if (OIs.length != 2) {
+      throw new UDFArgumentException("Function takes exactly 2 arguments");
+    }
+
+    if (!isPrimitiveNumber(OIs[0])) {
+      throw new UDFArgumentException("Argument 0 must be a number");
+    }
+
+    oiBinSize = (PrimitiveObjectInspector) OIs[0];
+
+    if (isPrimitiveNumber(OIs[1])) {
+      oiBinId = (PrimitiveObjectInspector) OIs[1];
+    } else if (HiveGeometryOIHelper.canCreate(OIs[1])) {
+      binPoint = HiveGeometryOIHelper.create(OIs[1], 1);
+    } else {
+      throw new UDFArgumentException("Argument 1 must be a number or valid geometry type");
+    }
+
+    return GeometryUtils.geometryTransportObjectInspector;
+  }
+
+  private boolean isPrimitiveNumber(ObjectInspector oi) {
+    if (oi.getCategory() != Category.PRIMITIVE) {
+      return false;
+    }
+
+    return EnumSet.of(PrimitiveCategory.DOUBLE, PrimitiveCategory.INT, PrimitiveCategory.LONG, PrimitiveCategory.SHORT,
+        PrimitiveCategory.FLOAT, PrimitiveCategory.DECIMAL)
+        .contains(((PrimitiveObjectInspector) oi).getPrimitiveCategory());
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] args) throws HiveException {
+    double binSize = PrimitiveObjectInspectorUtils.getDouble(args[0].get(), oiBinSize);
+
+    if (!binSizeIsConstant || bins == null) {
+      bins = new BinUtils(binSize);
+    }
+
+    Envelope env = new Envelope();
+
+    if (oiBinId != null) {
+      // argument 1 is a number, attempt to get the envelope with bin ID
+      if (args[1].get() == null) {
+        // null bin ID argument usually means the source point was null or failed to parse
+        return null;
+      }
+
+      long binId = PrimitiveObjectInspectorUtils.getLong(args[1].get(), oiBinId);
+      bins.queryEnvelope(binId, env);
+    } else {
+      // argument 1 is a geometry, attempt to get the envelope with a point
+      OGCPoint point = binPoint.getPoint(args);
+
+      if (point == null) {
+        return null;
+      }
+
+      bins.queryEnvelope(point.X(), point.Y(), env);
+    }
+
+    return GeometryUtils.geometryToEsriShapeBytesWritable(env, 0, OGCType.ST_POLYGON);
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    assert (args.length == 2);
+    return String.format("st_binenvelope(%s,%s)", args[0], args[1]);
+  }
+}
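ST_BinEnvelope converts a bin ID (or a point) back into the cell's polygon, so the counts produced with ST_Bin can be rendered as square cells. Continuing the hypothetical taxi_pickups example from ST_Bin:

    SELECT ST_AsText(ST_BinEnvelope(0.01, cell_id)) AS cell, pickups
    FROM (
      SELECT ST_Bin(0.01, ST_Point(lon, lat)) AS cell_id, COUNT(*) AS pickups
      FROM taxi_pickups
      GROUP BY ST_Bin(0.01, ST_Point(lon, lat))
    ) binned;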
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Boundary.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Boundary.java
new file mode 100755
index 00000000000..73d6eee7736
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Boundary.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCMultiLineString;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Boundary",
+    value = "_FUNC_(ST_Geometry) - boundary of the input ST_Geometry",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_LineString(0,1, 1,0))) FROM src LIMIT 1;   -- MULTIPOINT((1 0),(0 1))\n"
+        + "  SELECT _FUNC_(ST_Polygon(1,1, 4,1, 1,4)) FROM src LIMIT 1;  -- LINESTRING(1 1, 4 1, 1 4, 1 1)\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_Boundary(ST_Linestring('linestring (10 10, 20 20)'))) from onerow",
+//			result = "ST_MULTIPOINT"
+//			 ),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_Boundary(ST_Linestring('linestring (10 10, 20 20)')), ST_GeomFromText('multipoint ((10 10), (20 20))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+// The boundary of a point (or multipoint) is the empty set  OGC 4.18, 6.1.5
+// The boundary of a closed curve is empty; non-closed curve, its 2 end points  OGC 6.1.6.1
+// The boundary of a surface is the set of closed curves that form its limits  OGC 4.21
+
+public class ST_Boundary extends ST_GeometryProcessing {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Boundary.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    try {
+      OGCGeometry boundGeom = ogcGeometry.boundary();
+      if (boundGeom.geometryType().equals("MultiLineString") && ((OGCMultiLineString) boundGeom).numGeometries() == 1)
+        boundGeom = ((OGCMultiLineString) boundGeom).geometryN(0);  // match ST_Boundary/SQL-RDBMS
+      return GeometryUtils.geometryToEsriShapeBytesWritable(boundGeom);
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_Boundary: " + e);
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Buffer.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Buffer.java
new file mode 100644
index 00000000000..98f395bbb04
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Buffer.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Buffer",
+    value = "_FUNC_(ST_Geometry, distance) - ST_Geometry buffered by distance",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_Point(0, 0), 1) FROM src LIMIT 1;   -- polygon approximating a unit circle\n") public class ST_Buffer
+    extends ST_GeometryProcessing {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_Buffer.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geometryref1, DoubleWritable distance) {
+    if (geometryref1 == null || geometryref1.getLength() == 0 || distance == null) {
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geometryref1);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry bufferedGeometry = ogcGeometry.buffer(distance.get());
+    // TODO persist type information (polygon vs multipolygon)
+    return GeometryUtils.geometryToEsriShapeBytesWritable(bufferedGeometry);
+  }
+}
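The buffered result is an ordinary ST_Geometry, so it composes with the other UDFs. A sketch checking the area of a unit-circle buffer; the value is only approximate because the circle is approximated by a polygon:

    SELECT ST_Area(ST_Buffer(ST_Point(0, 0), 1)) FROM onerow;  -- roughly pi (~3.14), by polygonal approximation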
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Centroid.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Centroid.java
new file mode 100644
index 00000000000..430134fe8f3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Centroid.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Centroid",
+    value = "_FUNC_(geometry) - returns the centroid of the geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_GeomFromText('point (2 3)'));  -- POINT(2 3)\n"
+        + "  > SELECT _FUNC_(ST_GeomFromText('multipoint ((0 0), (1 1), (1 -1), (6 0))'));  -- POINT(2 0)\n"
+        + "  > SELECT _FUNC_(ST_GeomFromText('linestring ((0 0, 6 0))'));  -- POINT(3 0)\n"
+        + "  > SELECT _FUNC_(ST_GeomFromText('linestring ((0 0, 2 4, 6 8))'));  -- POINT(3 4)\n"
+        + "  > SELECT _FUNC_(ST_GeomFromText('polygon ((0 0, 0 8, 8 8, 8 0, 0 0))'));  -- POINT(4 4)\n"
+        + "  > SELECT _FUNC_(ST_GeomFromText('polygon ((1 1, 5 1, 3 4))'));  -- POINT(3 2)\n")
+
+public class ST_Centroid extends ST_GeometryAccessor {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Centroid.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeometry.centroid());
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Contains.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Contains.java
new file mode 100755
index 00000000000..bc0d58a9d4c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Contains.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.OperatorContains;
+import com.esri.core.geometry.OperatorSimpleRelation;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+
+@UDFType(deterministic = true) @Description(name = "ST_Contains",
+    value = "_FUNC_(geometry1, geometry2) - return true if geometry1 contains geometry2",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(st_polygon(1,1, 1,4, 4,4, 4,1), st_point(2, 3) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(st_polygon(1,1, 1,4, 4,4, 4,1), st_point(8, 8) from src LIMIT 1;  -- return false") public class ST_Contains
+    extends ST_GeometryRelational {
+
+  @Override
+  protected OperatorSimpleRelation getRelationOperator() {
+    return OperatorContains.local();
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    return String.format("returns true if %s contains %s", args[0], args[1]);
+  }
+}
+
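Relational predicates such as ST_Contains are typically used for point-in-polygon assignment across two tables. A hedged sketch with hypothetical zones(name STRING, boundary BINARY) and events(id BIGINT, location BINARY) tables; since the spatial predicate is not an equi-join key, Hive evaluates this as a cross product filtered in WHERE:

    SELECT z.name, COUNT(*) AS events_in_zone
    FROM zones z, events e
    WHERE ST_Contains(z.boundary, e.location)
    GROUP BY z.name;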
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_ConvexHull.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_ConvexHull.java
new file mode 100755
index 00000000000..8371882ace3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_ConvexHull.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.udf.esri.GeometryUtils.OGCType;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_ConvexHull",
+    value = "_FUNC_(ST_Geometry, ST_Geometry, ...) - returns an ST_Geometry as the convex hull of the supplied ST_Geometries",
+    extended = "Example: SELECT ST_AsText(ST_ConvexHull(ST_Point(0, 0), ST_Point(0, 1), ST_Point(1, 1))) FROM onerow;\n"
+        + "MULTIPOLYGON (((0 0, 1 1, 0 1, 0 0)))")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_AsText(ST_ConvexHull(ST_Point(0, 0), ST_Point(0, 1), ST_Point(1, 1))) FROM onerow",
+//			result = "MULTIPOLYGON (((0 0, 1 1, 0 1, 0 0)))"
+//			)
+//		}
+//	)
+
+public class ST_ConvexHull extends ST_GeometryProcessing {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_ConvexHull.class.getName());
+
+  public BytesWritable evaluate(BytesWritable... geomrefs) {
+
+    // validate arguments
+    if (geomrefs == null || geomrefs.length < 1) {
+      // LogUtils.Log_VariableArgumentLength(LOG);
+      return null;
+    }
+
+    int firstWKID = 0;
+
+    // validate spatial references and geometries first
+    for (int i = 0; i < geomrefs.length; i++) {
+
+      BytesWritable geomref = geomrefs[i];
+
+      if (geomref == null || geomref.getLength() == 0) {
+        LogUtils.Log_ArgumentsNull(LOG);
+        return null;
+      }
+
+      if (i == 0) {
+        firstWKID = GeometryUtils.getWKID(geomref);
+      } else if (firstWKID != GeometryUtils.getWKID(geomref)) {
+        LogUtils.Log_SRIDMismatch(LOG, geomrefs[0], geomref);
+        return null;
+      }
+    }
+
+    // now build geometry array to pass to GeometryEngine.convexHull
+    Geometry[] geomsToProcess = new Geometry[geomrefs.length];
+
+    for (int i = 0; i < geomrefs.length; i++) {
+      //HiveGeometry hiveGeometry = GeometryUtils.geometryFromEsriShape(geomrefs[i]);
+      OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomrefs[i]);
+
+      if (ogcGeometry == null) {
+        LogUtils.Log_ArgumentsNull(LOG);
+        return null;
+      }
+
+      geomsToProcess[i] = ogcGeometry.getEsriGeometry();
+    }
+
+    try {
+
+      Geometry[] geomResult = GeometryEngine.convexHull(geomsToProcess, true);
+
+      if (geomResult.length != 1) {
+        return null;
+      }
+
+      Geometry merged = geomResult[0];
+
+      // we have to infer the type of the convex-hull result because we don't know
+      // if it's going to end up as a single or multi-part geometry
+      OGCType inferredType = GeometryUtils.getInferredOGCType(merged);
+
+      return GeometryUtils.geometryToEsriShapeBytesWritable(merged, firstWKID, inferredType);
+    } catch (Exception e) {
+      LogUtils.Log_ExceptionThrown(LOG, "GeometryEngine.convexHull", e);
+      return null;
+    }
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_CoordDim.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_CoordDim.java
new file mode 100755
index 00000000000..376628de59c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_CoordDim.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_CoordDim",
+    value = "_FUNC_(geometry) - return count of coordinate components",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- 2\n"
+        + "  > SELECT _FUNC_(ST_PointZ(1.5,2.5, 3) FROM src LIMIT 1;  -- 3\n"
+        + "  > SELECT _FUNC_(ST_Point(1.5, 2.5, 3., 4.)) FROM src LIMIT 1;  -- 4\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_CoordDim(ST_Point(0., 3.)) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_CoordDim(ST_PointZ(0., 3., 1)) from onerow",
+//			result = "3"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_CoordDim(ST_Point(0., 3., 1., 2.)) from onerow",
+//			result = "4"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_CoordDim(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_CoordDim extends ST_GeometryAccessor {
+  final IntWritable resultInt = new IntWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_CoordDim.class.getName());
+
+  public IntWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      return null;
+    }
+
+    resultInt.set(ogcGeometry.coordinateDimension());
+    return resultInt;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Crosses.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Crosses.java
new file mode 100755
index 00000000000..39d48cabccb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Crosses.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.OperatorCrosses;
+import com.esri.core.geometry.OperatorSimpleRelation;
+import org.apache.hadoop.hive.ql.exec.Description;
+
+@Description(name = "ST_Crosses",
+    value = "_FUNC_(geometry1, geometry2) - return true if geometry1 crosses geometry2",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(st_linestring(0,0, 1,1), st_linestring(1,0, 0,1)) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(st_linestring(2,0, 2,3), st_polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(st_linestring(0,2, 0,1), ST_linestring(2,0, 1,0)) from src LIMIT 1;  -- return false") public class ST_Crosses
+    extends ST_GeometryRelational {
+
+  @Override
+  protected OperatorSimpleRelation getRelationOperator() {
+    return OperatorCrosses.local();
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    return String.format("returns true if %s crosses %s", args[0], args[1]);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Difference.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Difference.java
new file mode 100755
index 00000000000..c8847b11909
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Difference.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Difference",
+    value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return the difference of ST_Geometry1 - ST_Geometry2",
+    extended = "Examples:\n"
+        + " > SELECT ST_AsText(ST_Difference(ST_MultiPoint(1, 1, 1.5, 1.5, 2, 2), ST_Point(1.5, 1.5))) FROM onerow; \n"
+        + " MULTIPOINT (1 1, 2 2)\n"
+        + " > SELECT ST_AsText(ST_Difference(ST_Polygon(0, 0, 0, 10, 10, 10, 10, 0), ST_Polygon(0, 0, 0, 5, 5, 5, 5, 0))) from onerow;\n"
+        + " MULTIPOLYGON (((10 0, 10 10, 0 10, 0 5, 5 5, 5 0, 10 0)))\n\n")
+//@HivePdkUnitTests(
+//		cases = {
+//				@HivePdkUnitTest(
+//						query = "SELECT ST_AsText(ST_Difference(ST_MultiPoint(1, 1, 1.5, 1.5, 2, 2), ST_Point(1.5, 1.5))) FROM onerow",
+//						result = "MULTIPOINT (1 1, 2 2)"
+//						),
+//				@HivePdkUnitTest(
+//						query = "SELECT ST_AsText(ST_Difference(ST_Polygon(0, 0, 0, 10, 10, 10, 10, 0), ST_Polygon(0, 0, 0, 5, 5, 5, 5, 0))) from onerow",
+//						result = "MULTIPOLYGON (((10 0, 10 10, 0 10, 0 5, 5 5, 5 0, 10 0)))"
+//						)
+//			}
+//		)
+public class ST_Difference extends ST_GeometryProcessing {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_Difference.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) {
+    if (geometryref1 == null || geometryref2 == null || geometryref1.getLength() == 0
+        || geometryref2.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) {
+      LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2);
+      return null;
+    }
+
+    OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1);
+    OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2);
+    if (ogcGeom1 == null || ogcGeom2 == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry diffGeometry = ogcGeom1.difference(ogcGeom2);
+
+    // we have to infer the type of the differenced geometry because we don't know
+    // if it's going to end up as a single or multi-part geometry
+    // OGCType inferredType = GeometryUtils.getInferredOGCType(diffGeometry.getEsriGeometry());
+
+    return GeometryUtils.geometryToEsriShapeBytesWritable(diffGeometry);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Dimension.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Dimension.java
new file mode 100755
index 00000000000..3798b91e882
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Dimension.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Dimension",
+    value = "_FUNC_(geometry) - return spatial dimension of geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- 0\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- 1\n"
+        + "  > SELECT _FUNC_(ST_Polygon(2,0, 2,3, 3,0)) FROM src LIMIT 1;  -- 2\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Dimension(ST_Point(0,0)) from onerow",
+//			result = "0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Dimension(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Dimension(ST_Polygon(1.5,2.5, 3.0,2.2, 2.2,1.1)) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Dimension(ST_MultiPoint(0,0, 2,2)) from onerow",
+//			result = "0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Dimension(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Dimension(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Dimension(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_Dimension extends ST_GeometryAccessor {
+  final IntWritable resultInt = new IntWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_Dimension.class.getName());
+
+  public IntWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    resultInt.set(ogcGeometry.dimension());
+    return resultInt;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Disjoint.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Disjoint.java
new file mode 100755
index 00000000000..50b70397b11
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Disjoint.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.OperatorDisjoint;
+import com.esri.core.geometry.OperatorSimpleRelation;
+import org.apache.hadoop.hive.ql.exec.Description;
+
+@Description(name = "ST_Disjoint",
+    value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return true if ST_Geometry1 intersects ST_Geometry2",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(ST_LineString(0,0, 0,1), ST_LineString(1,1, 1,0)) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(ST_LineString(0,0, 1,1), ST_LineString(1,0, 0,1)) from src LIMIT 1;  -- return false\n")
+
+public class ST_Disjoint extends ST_GeometryRelational {
+
+  @Override
+  protected OperatorSimpleRelation getRelationOperator() {
+    return OperatorDisjoint.local();
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    return String.format("returns true if %s and %s are disjoint", args[0], args[1]);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Distance.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Distance.java
new file mode 100755
index 00000000000..ff8f80606b8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Distance.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Distance",
+    value = "_FUNC_(ST_Geometry1, ST_Geometry2) - returns the distance between 2 ST_Geometry objects",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Point(0.0,0.0), ST_Point(3.0,4.0)) FROM src LIMIT 1;  --  5.0")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Distance(ST_Point(0.0,0.0), ST_Point(3.0,4.0)) from onerow",
+//			result = "5.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Distance(ST_LineString(0,0, 1,1), ST_LineString(2,1, 3,0)) from onerow",
+//			result = "11"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Distance(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_Distance extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_Distance.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) {
+    if (geometryref1 == null || geometryref2 == null || geometryref1.getLength() == 0
+        || geometryref2.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) {
+      LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2);
+      return null;
+    }
+
+    OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1);
+    OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2);
+    if (ogcGeom1 == null || ogcGeom2 == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      resultDouble.set(ogcGeom1.distance(ogcGeom2));
+      return resultDouble;
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_Distance: " + e);
+      return null;
+    }
+
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_EndPoint.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_EndPoint.java
new file mode 100644
index 00000000000..637861fdc22
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_EndPoint.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.MultiPath;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_EndPoint",
+    value = "_FUNC_(geometry) - returns the last point of an ST_Linestring",
+    extended = "Example:\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- POINT(3.0 2.0)\n")
+
+public class ST_EndPoint extends ST_GeometryAccessor {
+  static final Logger LOG = LoggerFactory.getLogger(ST_EndPoint.class.getName());
+
+  /**
+   * Return the last point of the ST_Linestring.
+   * @param geomref hive geometry bytes
+   * @return byte-reference of the last ST_Point
+   */
+  public BytesWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_LINESTRING) {
+      MultiPath lines = (MultiPath) (ogcGeometry.getEsriGeometry());
+      int wkid = GeometryUtils.getWKID(geomref);
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      return GeometryUtils.geometryToEsriShapeBytesWritable(
+          OGCGeometry.createFromEsriGeometry(lines.getPoint(lines.getPointCount() - 1), spatialReference));
+    } else {
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_EnvIntersects.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_EnvIntersects.java
new file mode 100755
index 00000000000..8dbd50baf61
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_EnvIntersects.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Envelope;
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_EnvIntersects",
+    value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return true if the envelopes of ST_Geometry1 and ST_Geometry2 intersect",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(ST_LineString(0,0, 1,1), ST_LineString(1,3, 2,2)) from src LIMIT 1;  -- return false\n"
+        + "SELECT _FUNC_(ST_LineString(0,0, 2,2), ST_LineString(1,0, 3,2)) from src LIMIT 1;  -- return true\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_EnvIntersects(ST_LineString(0,0, 1,1), ST_LineString(1,3, 2,2)) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_EnvIntersects(ST_LineString(0,0, 2,2), ST_LineString(1,0, 3,2)) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_EnvIntersects(null, ST_LineString(0,0, 1,1)) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_EnvIntersects extends ST_Geometry {
+
+  final BooleanWritable resultBoolean = new BooleanWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_EnvIntersects.class.getName());
+
+  public BooleanWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) {
+    if (geometryref1 == null || geometryref2 == null || geometryref1.getLength() == 0
+        || geometryref2.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) {
+      LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2);
+      return null;
+    }
+
+    OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1);
+    OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2);
+    if (ogcGeom1 == null || ogcGeom2 == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Geometry geometry1 = ogcGeom1.getEsriGeometry();
+    Geometry geometry2 = ogcGeom2.getEsriGeometry();
+    Envelope env1 = new Envelope(), env2 = new Envelope();
+    geometry1.queryEnvelope(env1);
+    geometry2.queryEnvelope(env2);
+
+    resultBoolean.set(env1.isIntersecting(env2));
+    return resultBoolean;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Envelope.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Envelope.java
new file mode 100755
index 00000000000..4e2b62f2046
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Envelope.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Envelope;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Envelope",
+    value = "_FUNC_(ST_Geometry) - the envelope of the ST_Geometry",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(ST_LineString(0,0, 2,2)) from src LIMIT 1;  -- POLYGON ((0 0, 2 0, 2 2, 0 2, 0 0))\n"
+        + "SELECT _FUNC_(ST_Polygon(2,0, 2,3, 3,0)) from src LIMIT 1;  -- POLYGON ((2 0, 3 0, 3 3, 2 3, 2 0))\n"
+        + "OGC Compliance Notes : \n" + " In the case of a point or a vertical or horizontal line,"
+        + " ST_Envelope may either apply a tolerance or return an empty envelope.")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_AsText(ST_Envelope(ST_LineString(0,0, 2,2))) from onerow",
+//			result = "POLYGON ((0 0, 2 0, 2 2, 0 2, 0 0))"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_AsText(ST_Envelope(ST_Polygon(2,0, 2,3, 3,0))) from onerow",
+//			result = "POLYGON ((2 0, 3 0, 3 3, 2 3, 2 0))"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Envelope(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_Envelope extends ST_GeometryProcessing {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Envelope.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geometryref) {
+    if (geometryref == null || geometryref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geometryref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    int wkid = GeometryUtils.getWKID(geometryref);
+    SpatialReference spatialReference = null;
+    if (wkid != GeometryUtils.WKID_UNKNOWN) {
+      spatialReference = SpatialReference.create(wkid);
+    }
+    Envelope envBound = new Envelope();
+    ogcGeometry.getEsriGeometry().queryEnvelope(envBound);
+    return GeometryUtils
+        .geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(envBound, spatialReference));
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Equals.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Equals.java
new file mode 100755
index 00000000000..170d8624541
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Equals.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.OperatorEquals;
+import com.esri.core.geometry.OperatorSimpleRelation;
+import org.apache.hadoop.hive.ql.exec.Description;
+
+@Description(name = "ST_Equals",
+    value = "_FUNC_(geometry1, geometry2) - return true if geometry1 equals geometry2",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(st_linestring(0,0, 1,1), st_linestring(1,1, 0,0)) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(st_linestring(0,0, 1,1), st_linestring(1,0, 0,1)) from src LIMIT 1;  -- return false\n") public class ST_Equals
+    extends ST_GeometryRelational {
+
+  @Override
+  protected OperatorSimpleRelation getRelationOperator() {
+    return OperatorEquals.local();
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    return String.format("returns true if %s equals %s", args[0], args[1]);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_ExteriorRing.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_ExteriorRing.java
new file mode 100755
index 00000000000..29ba1eee34a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_ExteriorRing.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCLineString;
+import com.esri.core.geometry.ogc.OGCPolygon;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.reflect.Method;
+
+@Description(name = "ST_ExteriorRing",
+    value = "_FUNC_(polygon) - return linestring which is the exterior ring of the polygon",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,1)) FROM src LIMIT 1;  -- LINESTRING(1 1, 4 1, 1 4, 1 1)\n"
+        + "  SELECT _FUNC_(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')) FROM src LIMIT 1;  -- LINESTRING (8 0, 0 8, 0 0, 8 0)\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_ExteriorRing(ST_Polygon('polygon ((1 1, 4 1, 1 4))')), ST_LineString('linestring(1 1, 4 1, 1 4, 1 1)')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_ExteriorRing(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')), ST_LineString('linestring(0 0, 8 0, 0 8, 0 0)')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_ExteriorRing(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_ExteriorRing extends ST_GeometryProcessing {
+  static final Logger LOG = LoggerFactory.getLogger(ST_ExteriorRing.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_POLYGON) {
+      Method extMethod;
+      try {
+        // expect to streamline with updated geometry-api
+        // OGCLineString extRing = ((OGCPolygon)(ogcGeometry)).exteriorRing();
+        extMethod = OGCPolygon.class.getMethod("exteriorRing");
+      } catch (Exception e) {
+        LogUtils.Log_InternalError(LOG, "ST_ExteriorRing: " + e);
+        try {
+          extMethod = OGCPolygon.class.getMethod("exterorRing");
+        } catch (Exception x) {
+          LogUtils.Log_InternalError(LOG, "ST_ExteriorRing: " + x);
+          return null;
+        }
+      }
+      try {
+        OGCLineString extRing = (OGCLineString) (extMethod.invoke(ogcGeometry));
+        return GeometryUtils.geometryToEsriShapeBytesWritable(extRing);
+      } catch (Exception e) {
+        LogUtils.Log_InternalError(LOG, "ST_ExteriorRing: " + e);
+        return null;
+      }
+    } else {
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POLYGON, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeodesicLengthWGS84.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeodesicLengthWGS84.java
new file mode 100755
index 00000000000..0d351d00916
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeodesicLengthWGS84.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.MultiPath;
+import com.esri.core.geometry.Point;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_GeodesicLengthWGS84",
+    value = "_FUNC_(line) - returns distance along line on WGS84 spheroid, in meters, for geographic coordinates",
+    extended = "Requires the geometry to be in in WGS84 spatial reference, else returns NULL\nExample:\n"
+        + " SELECT _FUNC_(ST_SetSRID(ST_Linestring(0.0,0.0, 0.3,0.4), 4326)) FROM src LIMIT 1; -- 55km\n"
+        + " SELECT _FUNC_(ST_GeomFromText('MultiLineString((0.0 80.0, 0.3 80.4))', 4326)) FROM src LIMIT 1; -- 45km\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select substr(ST_GeodesicLengthWGS84(ST_GeomFromText('LineString(0 0, 0.03 0.04)', 4326)), 1, 5) from onerow",
+//			result = "5542."
+//			),
+//		@HivePdkUnitTest(
+//			query = "select substr(ST_GeodesicLengthWGS84(ST_GeomFromText('MultiLineString((0 80, 0.03 80.04))', 4326)), 1, 5) from onerow",
+//			result = "4503."
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Length(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_GeodesicLengthWGS84 extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_GeodesicLengthWGS84.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    int WGS84 = 4326;
+    if (GeometryUtils.getWKID(geomref) != WGS84) {
+      LogUtils.Log_SRIDMismatch(LOG, geomref, WGS84);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Geometry esriGeom = ogcGeometry.getEsriGeometry();
+    switch (esriGeom.getType()) {
+    case Point:
+    case MultiPoint:
+      resultDouble.set(0.);
+      break;
+    default:
+      MultiPath lines = (MultiPath) (esriGeom);
+      int nPath = lines.getPathCount();
+      double length = 0.;
+      for (int ix = 0; ix < nPath; ix++) {
+        int curPt = lines.getPathStart(ix);
+        int pastPt = lines.getPathEnd(ix);
+        Point fromPt = lines.getPoint(curPt);
+        Point toPt = null;
+        for (int vx = curPt + 1; vx < pastPt; vx++) {
+          toPt = lines.getPoint(vx);
+          length += GeometryEngine.geodesicDistanceOnWGS84(fromPt, toPt);
+          fromPt = toPt;
+        }
+      }
+      resultDouble.set(length);
+      break;
+    }
+
+    return resultDouble;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomCollection.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomCollection.java
new file mode 100755
index 00000000000..7d5bea4b997
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomCollection.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_GeomCollection",
+    value = "_FUNC_(wkt) - construct a multi-part ST_Geometry from OGC well-known text",
+    extended = "Example:\n"
+        + "  > SELECT _FUNC_('multipoint ((1 0), (2 3))') FROM src LIMIT 1;  -- constructs ST_MultiPoint\n"
+        + "OGC Compliance Notes : \n"
+        + " ST_GeomCollection on Hive does not support collections - only multi-part geometries.\n"
+        + "ST_GeomCollection('POINT(1 1), LINESTRING(2 0,3 0)') -- not supported\n")
+
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_GeomCollection('MULTIPOINT ((10 40), (40 30))'), ST_GeomFromText('MULTIPOINT ((10 40), (40 30))')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_GeomCollection('multilinestring ((2 4, 10 10), (20 20, 7 8))'), ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_GeomCollection('multipolygon (((3 3, 4 6, 5 3, 3 3)),((8 24, 9 25, 1 28, 8 24)))'), ST_GeomFromText('multipolygon (((3 3, 4 6, 5 3, 3 3)),((8 24, 9 25, 1 28, 8 24)))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_GeomCollection extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_GeomCollection.class.getName());
+
+  public BytesWritable evaluate(Text wkt) throws UDFArgumentException {
+    return evaluate(wkt, 0);
+  }
+
+  public BytesWritable evaluate(Text wkwrap, int wkid) throws UDFArgumentException {
+
+    String wkt = wkwrap.toString();
+
+    try {
+      Geometry geomObj = GeometryEngine.geometryFromWkt(wkt, 0, Geometry.Type.Unknown);
+      SpatialReference spatialReference = null;  // Idea: OGCGeometry.setSpatialReference after .fromText
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      OGCGeometry ogcObj = OGCGeometry.createFromEsriGeometry(geomObj, spatialReference);
+      return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LogUtils.Log_InvalidText(LOG, wkt);
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromGeoJson.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromGeoJson.java
new file mode 100644
index 00000000000..24bd8fb7c51
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromGeoJson.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_GeomFromGeoJSON",
+    value = "_FUNC_(json) - construct an ST_Geometry from GeoJSON",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_('{\"type\":\"Point\", \"coordinates\":[1.2, 2.4]}') FROM src LIMIT 1;  -- constructs ST_Point\n"
+        + "  SELECT _FUNC_('{\"type\":\"LineString\", \"coordinates\":[[1,2], [3,4]]}') FROM src LIMIT 1;  -- constructs ST_LineString\n")
+
+public class ST_GeomFromGeoJson extends GenericUDF {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_GeomFromGeoJson.class.getName());
+
+  ObjectInspector jsonOI;
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    DeferredObject jsonDeferredObject = arguments[0];
+
+    String json = null;
+
+    if (jsonOI.getCategory() == Category.STRUCT) {
+      //StructObjectInspector structOI = (StructObjectInspector)jsonOI;
+
+      // TODO support structs
+    } else {
+      PrimitiveObjectInspector primOI = (PrimitiveObjectInspector) jsonOI;
+      json = (String) primOI.getPrimitiveJavaObject(jsonDeferredObject.get());
+    }
+
+    try {
+      OGCGeometry ogcGeom = OGCGeometry.fromGeoJson(json);
+      return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom);
+    } catch (Exception e) {
+      LogUtils.Log_InvalidText(LOG, json);
+    }
+
+    return null;
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    StringBuilder sb = new StringBuilder();
+    sb.append(this.getClass().getName());
+    String delim = "(";
+    for (String arg : args) {
+      sb.append(delim).append(arg);
+      delim = ", ";
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+
+    if (arguments.length != 1) {
+      throw new UDFArgumentLengthException("ST_GeomFromJson takes only one argument");
+    }
+
+    ObjectInspector argJsonOI = arguments[0];
+
+    if (argJsonOI.getCategory() == Category.PRIMITIVE) {
+      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) argJsonOI;
+
+      if (poi.getPrimitiveCategory() != PrimitiveCategory.STRING) {
+        throw new UDFArgumentTypeException(0,
+            "ST_GeomFromJson argument category must be either a string primitive or struct");
+      }
+    } else if (argJsonOI.getCategory() != Category.STRUCT) {
+      // neither a string primitive nor a struct;
+      // reject so only supported categories reach evaluate()
+      throw new UDFArgumentTypeException(0,
+          "ST_GeomFromJson argument category must be either a string primitive or struct");
+    }
+
+    jsonOI = argJsonOI;
+
+    return GeometryUtils.geometryTransportObjectInspector;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromJson.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromJson.java
new file mode 100644
index 00000000000..74f6a533bf3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromJson.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.fasterxml.jackson.core.JsonFactory;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+
+@Description(name = "ST_GeomFromJSON",
+    value = "_FUNC_(json) - construct an ST_Geometry from Esri JSON",
+    extended = "Example:\n" + "  SELECT _FUNC_('{\"x\":0.0,\"y\":0.0}') FROM src LIMIT 1;  -- constructs ST_Point\n")
+
+public class ST_GeomFromJson extends GenericUDF {
+
+  static final JsonFactory jsonFactory = new JsonFactory();
+  ObjectInspector jsonOI;
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    DeferredObject jsonDeferredObject = arguments[0];
+
+    String json = null;
+
+    if (jsonOI.getCategory() == Category.STRUCT) {
+      //StructObjectInspector structOI = (StructObjectInspector)jsonOI;
+
+      // TODO support structs
+    } else {
+      PrimitiveObjectInspector primOI = (PrimitiveObjectInspector) jsonOI;
+      json = (String) primOI.getPrimitiveJavaObject(jsonDeferredObject.get());
+    }
+
+    try {
+      OGCGeometry ogcGeom = OGCGeometry.fromJson(json);
+      return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom);
+    } catch (Exception e) {
+      // invalid Esri JSON; fall through and return null
+    }
+
+    return null;
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    StringBuilder sb = new StringBuilder();
+    sb.append(this.getClass().getName());
+    String delim = "(";
+    for (String arg : args) {
+      sb.append(delim).append(arg);
+      delim = ", ";
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+
+    if (arguments.length != 1) {
+      throw new UDFArgumentLengthException("ST_GeomFromJson takes only one argument");
+    }
+
+    ObjectInspector argJsonOI = arguments[0];
+
+    if (argJsonOI.getCategory() == Category.PRIMITIVE) {
+      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) argJsonOI;
+
+      if (poi.getPrimitiveCategory() != PrimitiveCategory.STRING) {
+        throw new UDFArgumentTypeException(0,
+            "ST_GeomFromJson argument category must be either a string primitive or struct");
+      }
+    } else if (argJsonOI.getCategory() != Category.STRUCT) {
+      // neither a string primitive nor a struct;
+      // reject so only supported categories reach evaluate()
+      throw new UDFArgumentTypeException(0,
+          "ST_GeomFromJson argument category must be either a string primitive or struct");
+    }
+
+    jsonOI = argJsonOI;
+
+    return GeometryUtils.geometryTransportObjectInspector;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromShape.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromShape.java
new file mode 100644
index 00000000000..fa090a301b2
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromShape.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import org.apache.hadoop.hive.ql.udf.esri.GeometryUtils.OGCType;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_GeomFromShape",
+    value = "_FUNC_(shape) - construct ST_Geometry from Esri shape representation of geometry\n",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_AsShape(ST_Point(1, 2))); -- constructs ST_Point\n") public class ST_GeomFromShape
+    extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_GeomFromShape.class.getName());
+
+  public BytesWritable evaluate(BytesWritable shape) throws UDFArgumentException {
+    return evaluate(shape, 0);
+  }
+
+  public BytesWritable evaluate(BytesWritable shape, int wkid) throws UDFArgumentException {
+    try {
+      Geometry geometry = GeometryEngine.geometryFromEsriShape(shape.getBytes(), Geometry.Type.Unknown);
+      switch (geometry.getType()) {
+      case Point:
+        return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_POINT);
+
+      case MultiPoint:
+        return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_MULTIPOINT);
+
+      case Line:
+        return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_LINESTRING);
+
+      case Polyline:
+        return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_MULTILINESTRING);
+
+      case Envelope:
+        return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_POLYGON);
+
+      case Polygon:
+        return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.ST_MULTIPOLYGON);
+
+      default:
+        return GeometryUtils.geometryToEsriShapeBytesWritable(geometry, wkid, OGCType.UNKNOWN);
+      }
+    } catch (Exception e) {
+      LogUtils.Log_ExceptionThrown(LOG, "geom-from-shape", e);
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromText.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromText.java
new file mode 100755
index 00000000000..6b2d1abdb32
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromText.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_GeomFromText",
+    value = "_FUNC_(wkt) - construct an ST_Geometry from OGC well-known text",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_('linestring (1 0, 2 3)') FROM src LIMIT 1;  -- constructs ST_Linestring\n"
+        + "  SELECT _FUNC_('multipoint ((1 0), (2 3))') FROM src LIMIT 1;  -- constructs ST_MultiPoint\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_AsText(ST_GeomFromText('point (10.02 20.01)')) from onerow",
+//			result = "POINT (10.02 20.01)"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_AsText(ST_GeomFromText('linestring (10 10, 20 20)')) from onerow",
+//			result = "LINESTRING (10 10, 20 20)"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_AsText(ST_GeomFromText('polygon ((0 0, 0 10, 10 10, 0 0))')) from onerow",
+//			result = "POLYGON ((0 0, 0 10, 10 10, 0 0))"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_AsText(ST_GeomFromText('MULTIPOINT ((10 40), (40 30), (20 20), (30 10))')) from onerow",
+//			result = "MULTIPOINT (10 40, 40 30, 20 20, 30 10)"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_AsText(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow",
+//			result = "MULTILINESTRING ((2 4, 10 10), (20 20, 7 8))"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_AsText(ST_GeomFromText('multipolygon (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))')) from onerow",
+//			result = "MULTIPOLYGON (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))"
+//			)
+//		}
+//	)
+
+public class ST_GeomFromText extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_GeomFromText.class.getName());
+
+  public BytesWritable evaluate(Text wkt) throws UDFArgumentException {
+    return evaluate(wkt, 0);
+  }
+
+  public BytesWritable evaluate(Text wkwrap, int wkid) throws UDFArgumentException {
+
+    String wkt = wkwrap.toString();
+    try {
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      OGCGeometry ogcObj = OGCGeometry.fromText(wkt);
+      ogcObj.setSpatialReference(spatialReference);
+      return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LogUtils.Log_InvalidText(LOG, wkt);
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromWKB.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromWKB.java
new file mode 100755
index 00000000000..f9631c07fe7
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeomFromWKB.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+@Description(name = "ST_GeomFromWKB",
+    value = "_FUNC_(wkb) - construct an ST_Geometry from OGC well-known binary",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('linestring (1 0, 2 3)'))) FROM src LIMIT 1;  -- constructs ST_Linestring\n"
+        + "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('multipoint ((1 0), (2 3))'))) FROM src LIMIT 1;  -- constructs ST_MultiPoint\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromWKB(ST_AsBinary(ST_GeomFromText('point (10.02 20.01)')))) from onerow",
+//			result = "ST_POINT"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_GeomFromWKB(ST_AsBinary(ST_GeomFromText('point (10.02 20.01)'))),ST_GeomFromText('point (10.02 20.01)')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromWKB(ST_AsBinary(ST_GeomFromText('linestring (10 10, 20 20)')))) from onerow",
+//			result = "ST_LINESTRING"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_GeomFromWKB(ST_AsBinary(ST_GeomFromText('linestring (10 10, 20 20)'))), ST_GeomFromText('linestring (10 10, 20 20)')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromWKB(ST_AsBinary(ST_GeomFromText('polygon ((0 0, 0 10, 10 10, 0 0))')))) from onerow",
+//			result = "ST_POLYGON"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_GeomFromWKB(ST_AsBinary(ST_GeomFromText('polygon ((0 0, 0 10, 10 10, 0 0))'))), ST_GeomFromText('polygon ((0 0, 0 10, 10 10, 0 0))')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromWKB(ST_AsBinary(ST_GeomFromText('MULTIPOINT ((10 40), (40 30), (20 20), (30 10))')))) from onerow",
+//			result = "ST_MULTIPOINT"
+//			)
+//		}
+//	)
+
+public class ST_GeomFromWKB extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_GeomFromWKB.class.getName());
+
+  public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
+    return evaluate(wkb, 0);
+  }
+
+  public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {
+
+    try {
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      byte[] byteArr = wkb.getBytes();
+      ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length);
+      byteBuf.put(byteArr);
+      OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
+      ogcObj.setSpatialReference(spatialReference);
+      return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Geometry.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Geometry.java
new file mode 100644
index 00000000000..3830362913c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Geometry.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.apache.hadoop.hive.ql.exec.UDF;
+
+public abstract class ST_Geometry extends UDF {
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryAccessor.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryAccessor.java
new file mode 100644
index 00000000000..3f2bf9ae4e7
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryAccessor.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+/**
+ * Abstract base class for all accessors (ST_X/Y, IsBoolTests, ...)
+ *
+ */
+
+public abstract class ST_GeometryAccessor extends ST_Geometry {
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryN.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryN.java
new file mode 100644
index 00000000000..0c6c8c3154f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryN.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCMultiLineString;
+import com.esri.core.geometry.ogc.OGCMultiPoint;
+import com.esri.core.geometry.ogc.OGCMultiPolygon;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_GeometryN",
+    value = "_FUNC_(ST_GeometryCollection, n) - return the nth ST_Geometry in the collection (1-based index)",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))'), 3) FROM src LIMIT 1;  -- ST_Point(20 20)\n"
+        + "  SELECT _FUNC_(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))'), 2) FROM src LIMIT 1;  -- ST_Linestring(20 20, 7 8)\n")
+
+public class ST_GeometryN extends ST_GeometryAccessor {
+  static final Logger LOG = LoggerFactory.getLogger(ST_GeometryN.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref, IntWritable index) {
+    if (geomref == null || geomref.getLength() == 0 || index == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    int idx = index.get() - 1;  // 1-based UI, 0-based engine
+    try {
+      GeometryUtils.OGCType ogcType = GeometryUtils.getType(geomref);
+      OGCGeometry ogcGeom = null;
+      switch (ogcType) {
+      case ST_POINT:
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, ogcType);
+        return null;
+      case ST_LINESTRING:
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, ogcType);
+        return null;
+      case ST_POLYGON:
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, ogcType);
+        return null;
+      case ST_MULTIPOINT:
+        ogcGeom = ((OGCMultiPoint) ogcGeometry).geometryN(idx);
+        break;
+      case ST_MULTILINESTRING:
+        ogcGeom = ((OGCMultiLineString) ogcGeometry).geometryN(idx);
+        break;
+      case ST_MULTIPOLYGON:
+        ogcGeom = ((OGCMultiPolygon) ogcGeometry).geometryN(idx);
+        break;
+      }
+      return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom);
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_GeometryN: " + e);
+      return null;
+    }
+  }
+}
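
Passing a single geometry rather than a collection hits the ST_POINT/ST_LINESTRING/ST_POLYGON branches above, is logged as an invalid type, and yields NULL; an illustrative query (assuming the usual one-row source table):

    SELECT ST_GeometryN(ST_Point(1.5, 2.5), 1) FROM src LIMIT 1;  -- NULL
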
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryProcessing.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryProcessing.java
new file mode 100644
index 00000000000..463f46e5ac0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryProcessing.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+public class ST_GeometryProcessing extends ST_Geometry {
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryRelational.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryRelational.java
new file mode 100755
index 00000000000..d41700bdb13
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryRelational.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry.GeometryAccelerationDegree;
+import com.esri.core.geometry.OperatorContains;
+import com.esri.core.geometry.OperatorSimpleRelation;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.log4j.Logger;
+
+/**
+ * Abstract class that all simple relational tests (contains, touches, ...) extend from
+ *
+ */
+public abstract class ST_GeometryRelational extends GenericUDF {
+  private static final Logger LOG = Logger.getLogger(ST_GeometryRelational.class);
+
+  private static final int NUM_ARGS = 2;
+  private static final int GEOM_1 = 0;
+  private static final int GEOM_2 = 1;
+
+  private transient HiveGeometryOIHelper geomHelper1;
+  private transient HiveGeometryOIHelper geomHelper2;
+
+  private transient OperatorSimpleRelation opSimpleRelation;
+  private transient boolean firstRun = true;
+
+  private transient boolean geom1IsAccelerated = false;
+
+  /**
+   * Operators that extend this should return an instance of
+   * <code>OperatorSimpleRelation</code>
+   *
+   * @return operator for simple relationship tests
+   */
+  protected abstract OperatorSimpleRelation getRelationOperator();
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] OIs) throws UDFArgumentException {
+
+    opSimpleRelation = getRelationOperator();
+
+    if (OIs.length != NUM_ARGS) {
+      throw new UDFArgumentException("The " + opSimpleRelation.getType().toString().toLowerCase()
+          + " relationship operator takes exactly two arguments");
+    }
+
+    geomHelper1 = HiveGeometryOIHelper.create(OIs[GEOM_1], GEOM_1);
+    geomHelper2 = HiveGeometryOIHelper.create(OIs[GEOM_2], GEOM_2);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("OI[0]=" + geomHelper1);
+      LOG.debug("OI[1]=" + geomHelper2);
+    }
+
+    firstRun = true;
+    geom1IsAccelerated = false;
+
+    return PrimitiveObjectInspectorFactory.javaBooleanObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] args) throws HiveException {
+
+    OGCGeometry geom1 = geomHelper1.getGeometry(args);
+    OGCGeometry geom2 = geomHelper2.getGeometry(args);
+
+    if (geom1 == null || geom2 == null) {
+      return false;
+    }
+
+    if (firstRun && geomHelper1.isConstant()) {
+
+      // accelerate geometry 1 for quick relation operations since it is constant
+      geom1IsAccelerated = opSimpleRelation.accelerateGeometry(geom1.getEsriGeometry(), geom1.getEsriSpatialReference(),
+          GeometryAccelerationDegree.enumMedium);
+    }
+
+    firstRun = false;
+
+    return opSimpleRelation
+        .execute(geom1.getEsriGeometry(), geom2.getEsriGeometry(), geom1.getEsriSpatialReference(), null);
+  }
+
+  @Override
+  public void close() {
+    if (geom1IsAccelerated && geomHelper1 != null && geomHelper1.getConstantGeometry() != null) {
+      OperatorContains.deaccelerateGeometry(geomHelper1.getConstantGeometry().getEsriGeometry());
+    }
+  }
+}
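
A filter of the following shape exercises the constant-geometry path above: the polygon literal is constant, so geometry 1 is accelerated once on the first evaluate() call and reused for every row by subclasses such as ST_Contains (table and column names are illustrative):

    SELECT count(*)
    FROM trips
    WHERE ST_Contains(ST_Polygon(1,1, 4,1, 4,4, 1,4), ST_Point(longitude, latitude));
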
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryType.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryType.java
new file mode 100755
index 00000000000..6c97f7afb37
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_GeometryType.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_GeometryType",
+    value = "_FUNC_(geometry) - return type of geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- ST_Point\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- ST_LineString\n"
+        + "  > SELECT _FUNC_(ST_Polygon(2,0, 2,3, 3,0)) FROM src LIMIT 1;  -- ST_Polygon\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromText('point (10.02 20.01)')) from onerow",
+//			result = "ST_POINT"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromText('linestring (10 10, 20 20)')) from onerow",
+//			result = "ST_LINESTRING"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromText('polygon ((0 0, 0 10, 10 10, 0 0))')) from onerow",
+//			result = "ST_POLYGON"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromText('MULTIPOINT ((10 40), (40 30), (20 20), (30 10))')) from onerow",
+//			result = "ST_MULTIPOINT"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow",
+//			result = "ST_MULTILINESTRING"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_GeomFromText('multipolygon (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))')) from onerow",
+//			result = "ST_MULTIPOLYGON"
+//			)
+//		}
+//	)
+
+public class ST_GeometryType extends ST_Geometry {
+  static final Logger LOG = LoggerFactory.getLogger(ST_GeometryType.class.getName());
+
+  public Text evaluate(BytesWritable ref) {
+    if (ref == null || ref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    return new Text(GeometryUtils.getType(ref).toString());
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_InteriorRingN.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_InteriorRingN.java
new file mode 100755
index 00000000000..c0e93f991c8
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_InteriorRingN.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCLineString;
+import com.esri.core.geometry.ogc.OGCPolygon;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_InteriorRingN",
+    value = "_FUNC_(ST_Polygon, n) - return ST_LineString which is the nth interior ring of the ST_Polygon (1-based index)",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))'), 1) FROM src LIMIT 1;  -- LINESTRING (1 1, 5 1, 1 5, 1 1)\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_InteriorRingN(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))'), 1), ST_LineString('linestring(1 1, 5 1, 1 5, 1 1)')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_InteriorRingN(null, 1) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_InteriorRingN extends ST_GeometryProcessing {
+  static final Logger LOG = LoggerFactory.getLogger(ST_InteriorRingN.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref, IntWritable index) {
+    if (geomref == null || geomref.getLength() == 0 || index == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    int idx = index.get() - 1;  // 1-based UI, 0-based engine
+    if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_POLYGON) {
+      try {
+        OGCLineString hole = ((OGCPolygon) (ogcGeometry)).interiorRingN(idx);
+        return GeometryUtils.geometryToEsriShapeBytesWritable(hole);
+      } catch (Exception e) {
+        LogUtils.Log_InternalError(LOG, "ST_InteriorRingN: " + e);
+        return null;
+      }
+    } else {
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POLYGON, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Intersection.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Intersection.java
new file mode 100755
index 00000000000..f0d2eff1094
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Intersection.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Intersection",
+    value = "_FUNC_(ST_Geometry1, ST_Geometry2) - intersection of ST_Geometry1 & ST_Geometry2",
+    extended = "Example:\n" + "  SELECT ST_AsText(_FUNC_(ST_Point(1,1), ST_Point(1,1))) FROM onerow; -- POINT (1 1)\n"
+        + "  SELECT ST_AsText(_FUNC_(ST_GeomFromText('linestring(0 2, 0 0, 2 0)'), ST_GeomFromText('linestring(0 3, 0 1, 1 0, 3 0)'))) FROM onerow; -- MULTILINESTRING ((1 0, 2 0), (0 2, 0 1))\n"
+        + "  SELECT ST_AsText(_FUNC_(ST_LineString(0,2, 2,3), ST_Polygon(1,1, 4,1, 4,4, 1,4))) FROM onerow; -- MULTILINESTRING ((1 2.5, 2 3))\n"
+        + "  SELECT ST_AsText(_FUNC_(ST_Polygon(2,0, 2,3, 3,0), ST_Polygon(1,1, 4,1, 4,4, 1,4))) FROM onerow; -- MULTIPOLYGON (((2.67 1, 2 3, 2 1, 2.67 1)))\n"
+        + "OGC Compliance Notes : \n" + " In the case where the two geometries intersect in a lower dimension,"
+        + " ST_Intersection may drop the lower-dimension intersections, or output a closed linestring.\n"
+        + "SELECT ST_AsText(_FUNC_(ST_Polygon(2,0, 3,1, 2,1), ST_Polygon(1,1, 4,1, 4,4, 1,4))) FROM onerow; -- MULTIPOLYGON EMPTY or LINESTRING (2 1, 3 1, 2 1)\n")
+
+public class ST_Intersection extends ST_GeometryProcessing {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Intersection.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) {
+    if (geometryref1 == null || geometryref2 == null || geometryref1.getLength() == 0
+        || geometryref2.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) {
+      LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2);
+      return null;
+    }
+
+    OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1);
+    OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2);
+    if (ogcGeom1 == null || ogcGeom2 == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry commonGeom;
+    try {
+      commonGeom = ogcGeom1.intersection(ogcGeom2);
+      return GeometryUtils.geometryToEsriShapeBytesWritable(commonGeom);
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_Intersection: " + e);
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Intersects.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Intersects.java
new file mode 100755
index 00000000000..0db3d30396f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Intersects.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.OperatorIntersects;
+import com.esri.core.geometry.OperatorSimpleRelation;
+import org.apache.hadoop.hive.ql.exec.Description;
+
+@Description(name = "ST_Intersects",
+    value = "_FUNC_(geometry1, geometry2) - return true if geometry1 intersects geometry2",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(ST_LineString(2,0, 2,3), ST_Polygon(1,1, 4,1, 4,4, 1,4))) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(ST_LineString(8,7, 7,8), ST_Polygon(1,1, 4,1, 4,4, 1,4)) from src LIMIT 1;  -- return false\n")
+
+public class ST_Intersects extends ST_GeometryRelational {
+
+  @Override
+  protected OperatorSimpleRelation getRelationOperator() {
+    return OperatorIntersects.local();
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    return String.format("returns true if %s intersects %s", args[0], args[1]);
+  }
+}
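
Because the base class accelerates only its first argument when that argument is constant, putting the literal geometry first is the cheaper form for a selective filter (table and column names are illustrative):

    SELECT id
    FROM roads
    WHERE ST_Intersects(ST_GeomFromText('polygon ((1 1, 4 1, 4 4, 1 4, 1 1))'), shape);
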
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Is3D.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Is3D.java
new file mode 100755
index 00000000000..e879bc211c3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Is3D.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Is3D",
+    value = "_FUNC_(geometry) - return true if the geometry object is three-dimensional",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,4, 4,1)) FROM src LIMIT 1;  -- false\n"
+        + "  > SELECT _FUNC_(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) FROM src LIMIT 1;  -- false\n"
+        + "  > SELECT _FUNC_(ST_Point(3., 4.)) FROM src LIMIT 1;  -- false\n"
+        + "  > SELECT _FUNC_(ST_PointZ(3., 4., 2)) FROM src LIMIT 1;  -- true\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Is3D(ST_Point(0., 3.)) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Is3D(ST_PointZ(0., 3., 1)) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Is3D(ST_Point('pointzm (0. 3. 1. 2.)')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Is3D(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_Is3D extends ST_GeometryAccessor {
+  final BooleanWritable resultBoolean = new BooleanWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_Is3D.class.getName());
+
+  public BooleanWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    resultBoolean.set(ogcGeometry.is3D());
+    return resultBoolean;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsClosed.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsClosed.java
new file mode 100755
index 00000000000..9f498b0b085
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsClosed.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.MultiPath;
+import com.esri.core.geometry.Point;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_IsClosed",
+    value = "_FUNC_(ST_[Multi]LineString) - return true if the linestring or multi-line is closed",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) FROM src LIMIT 1;  -- true\n"
+        + "  SELECT _FUNC_(ST_LineString(0.,0., 3.,4.)) FROM src LIMIT 1;  -- false\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_IsClosed(ST_LineString(0.,0., 3.,4.)) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsClosed(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsClosed(ST_MultiLineString('multilinestring ((0 0, 3 4, 2 2), (6 2, 7 8))')) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsClosed(ST_MultiLineString('multilinestring ((0 0, 3 4, 2 2, 0 0), (6 2, 7 8))')) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsClosed(ST_MultiLineString('multilinestring ((0 0, 3 4, 2 2, 0 0), (6 2, 7 5, 6 8, 6 2))')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsClosed(null) from onerow",
+//			result = "null"
+//			)
+//		}
+//	)
+
+public class ST_IsClosed extends ST_GeometryAccessor {
+  final BooleanWritable resultBoolean = new BooleanWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_IsClosed.class.getName());
+
+  public BooleanWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+
+      switch (GeometryUtils.getType(geomref)) {
+      case ST_LINESTRING:
+      case ST_MULTILINESTRING:
+        MultiPath lines = (MultiPath) (ogcGeometry.getEsriGeometry());
+        int nPaths = lines.getPathCount();
+        boolean rslt = true;
+        for (int ix = 0; rslt && ix < nPaths; ix++) {
+          Point p0 = lines.getPoint(lines.getPathStart(ix));
+          Point pf = lines.getPoint(lines.getPathEnd(ix) - 1);
+          rslt = rslt && pf.equals(p0);  // no tolerance - OGC
+        }
+        resultBoolean.set(rslt);
+        return resultBoolean;
+      default:  // ST_IsClosed gives ERROR on Point or Polygon, on Postgres/Oracle
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.getType(geomref));
+        return null;
+      }
+
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_IsClosed" + e);
+      return null;
+    }
+
+  }
+}
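
For a multi-line, the loop above requires every path to be individually closed, so a single open path makes the whole result false, as in this example drawn from the commented test cases:

    SELECT ST_IsClosed(ST_MultiLineString('multilinestring ((0 0, 3 4, 2 2, 0 0), (6 2, 7 8))')) FROM src LIMIT 1;  -- false
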
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsEmpty.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsEmpty.java
new file mode 100755
index 00000000000..51cc8ae0525
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsEmpty.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_IsEmpty",
+    value = "_FUNC_(geometry) - return true if the geometry object is empty of geometric information",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- false\n"
+        + "  > SELECT _FUNC_(ST_GeomFromText('point empty')) FROM src LIMIT 1;  -- true\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_IsEmpty(ST_GeomFromText('point empty')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsEmpty(ST_Intersection(st_point(2,0), ST_Point(1,1))) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsEmpty(ST_GeomFromText('point (10.02 20.01)')) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsEmpty(null) from onerow",
+//			result = "null"
+//			)
+//		}
+//	)
+
+public class ST_IsEmpty extends ST_GeometryAccessor {
+  final BooleanWritable resultBoolean = new BooleanWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_IsEmpty.class.getName());
+
+  public BooleanWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      resultBoolean.set(ogcGeometry.isEmpty());
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_IsEmpty" + e);
+      return null;
+    }
+    return resultBoolean;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsMeasured.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsMeasured.java
new file mode 100755
index 00000000000..6fbd36f7bb9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsMeasured.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_IsMeasured",
+    value = "_FUNC_(geometry) - return true if the geometry object is three-dimensional",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,4, 4,1)) FROM src LIMIT 1;  -- false\n"
+        + "  > SELECT _FUNC_(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) FROM src LIMIT 1;  -- false\n"
+        + "  > SELECT _FUNC_(ST_Point(3., 4.)) FROM src LIMIT 1;  -- false\n"
+        + "  > SELECT _FUNC_(ST_PointM(3., 4., 2)) FROM src LIMIT 1;  -- true\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_IsMeasured(ST_Point(0., 3.)) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsMeasured(ST_Point('point m(0. 3. 1)')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsMeasured(ST_Point('pointzm (0. 3. 1. 2.)')) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsMeasured(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_IsMeasured extends ST_GeometryAccessor {
+  final BooleanWritable resultBoolean = new BooleanWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_IsMeasured.class.getName());
+
+  public BooleanWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    resultBoolean.set(ogcGeometry.isMeasured());
+    return resultBoolean;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsRing.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsRing.java
new file mode 100755
index 00000000000..d0361bb551d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsRing.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCLineString;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_IsRing",
+    value = "_FUNC_(ST_LineString) - return true if the linestring is closed & simple",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) FROM src LIMIT 1;  -- true\n"
+        + "  SELECT _FUNC_(ST_LineString(0.,0., 1.,1., 1.,2., 2.,1., 1.,1., 0.,0.)) FROM src LIMIT 1;  -- false\n"
+        + "  SELECT _FUNC_(ST_LineString(0.,0., 3.,4.)) FROM src LIMIT 1;  -- false\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_IsRing(ST_LineString(0.,0., 3.,4., 0.,4., 0.,0.)) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsRing(ST_LineString(0.,0., 3.,4.)) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsRing(ST_LineString(0.,0., 1.,1., 1.,2., 2.,1., 1.,1., 0.,0.)) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsRing(null) from onerow",
+//			result = "null"
+//			)
+//		}
+//	)
+
+public class ST_IsRing extends ST_GeometryAccessor {
+  final BooleanWritable resultBoolean = new BooleanWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_IsRing.class.getName());
+
+  public BooleanWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+
+      switch (GeometryUtils.getType(geomref)) {
+      case ST_LINESTRING:
+        OGCLineString lns = (OGCLineString) ogcGeometry;
+        resultBoolean.set(lns.isClosed() && lns.isSimple());
+        return resultBoolean;
+      default:  // ST_IsRing gives ERROR on Point, Polygon, or MultiLineString - on Postgres
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.getType(geomref));
+        return null;
+      }
+
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_IsRing" + e);
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsSimple.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsSimple.java
new file mode 100755
index 00000000000..c7de9af5974
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_IsSimple.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_IsSimple",
+    value = "_FUNC_(geometry) - return true if geometry is simple",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1; -- true\n"
+        + "  > SELECT _FUNC_(ST_LineString(0.,0., 1.,1., 0.,1., 1.,0.)) FROM src LIMIT 1; -- false\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_IsSimple(ST_Point(0,0)) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsSimple(ST_MultiPoint(0,0, 2,2)) from onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsSimple(ST_LineString(0.,0., 1.,1., 0.,1., 1.,0.)) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsSimple(ST_LineString(0,0, 1,0, 1,1, 0,2, 2,2, 1,1, 2,0)) from onerow",
+//			result = "false"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_IsSimple(null) from onerow",
+//			result = "null"
+//			)
+//		}
+//	)
+
+public class ST_IsSimple extends ST_GeometryAccessor {
+  final BooleanWritable resultBoolean = new BooleanWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_IsSimple.class.getName());
+
+  public BooleanWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      resultBoolean.set(ogcGeometry.isSimple());
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_IsSimple" + e);
+      return null;
+    }
+    return resultBoolean;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Length.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Length.java
new file mode 100755
index 00000000000..3a79855e9be
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Length.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Length",
+    value = "_FUNC_(line) - returns the length of line",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Line(0.0,0.0, 3.0,4.0)) FROM src LIMIT 1;  --  5.0")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Length(ST_SetSRID(ST_LineString(0.0,0.0, 3.0,4.0), 0)) from onerow",
+//			result = "5.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Length(ST_SetSRID(ST_MultiLineString(array(1,1, 1,2), array(10,10, 20,10)), 0)) from onerow",
+//			result = "11"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Length(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_Length extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_Length.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    resultDouble.set(ogcGeometry.getEsriGeometry().calculateLength2D());
+    return resultDouble;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_LineFromWKB.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_LineFromWKB.java
new file mode 100755
index 00000000000..2b19736b4a0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_LineFromWKB.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+@Description(name = "ST_LineFromWKB",
+    value = "_FUNC_(wkb) - construct an ST_LineString from OGC well-known binary",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('linestring (1 0, 2 3)'))) FROM src LIMIT 1;  -- constructs ST_Linestring\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_LineFromWKB(ST_AsBinary(ST_GeomFromText('linestring (10 10, 20 20)')))) from onerow",
+//			result = "ST_LINESTRING"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_LineFromWKB(ST_AsBinary(ST_GeomFromText('linestring (10 10, 20 20)'))), ST_GeomFromText('linestring (10 10, 20 20)')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_LineFromWKB extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_LineFromWKB.class.getName());
+
+  public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
+    return evaluate(wkb, 0);
+  }
+
+  public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {
+
+    try {
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      byte[] byteArr = wkb.getBytes();
+      ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length);
+      byteBuf.put(byteArr);
+      OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
+      ogcObj.setSpatialReference(spatialReference);
+      if (ogcObj.geometryType().equals("LineString")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_LineString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_LineString.java
new file mode 100755
index 00000000000..a50c93e9b99
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_LineString.java
@@ -0,0 +1,159 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Point;
+import com.esri.core.geometry.Polyline;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+
+@Description(name = "ST_LineString",
+    value = "_FUNC_(x, y, [x, y]*) - constructor for 2D line string\n"
+        + "_FUNC_(array(x+), array(y+)) - constructor for 2D line string\n"
+        + "_FUNC_(array(ST_Point(x,y)+)) - constructor for 2D line string\n"
+        + "_FUNC_('linestring( ... )') - constructor for 2D line string",
+    extended = "Example:\n" + "  SELECT _FUNC_(1, 1, 2, 2, 3, 3) from src LIMIT 1;\n"
+        + "  SELECT _FUNC_('linestring(1 1, 2 2, 3 3)') from src LIMIT 1;")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_Linestring('linestring (10 10, 20 20)')) from onerow",
+//			result = "ST_LINESTRING"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_Linestring('linestring (10 10, 20 20)'), ST_GeomFromText('linestring (10 10, 20 20)')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_LineString extends ST_Geometry {
+  static final Logger LOG = LoggerFactory.getLogger(ST_LineString.class.getName());
+
+  // Number-pairs constructor
+  public BytesWritable evaluate(DoubleWritable... xyPairs) throws UDFArgumentException {
+
+    if (xyPairs == null || xyPairs.length == 0 || xyPairs.length % 2 != 0) {
+      return null;
+    }
+
+    try {
+      Polyline linestring = new Polyline();
+      linestring.startPath(xyPairs[0].get(), xyPairs[1].get());
+
+      for (int i = 2; i < xyPairs.length; i += 2) {
+        linestring.lineTo(xyPairs[i].get(), xyPairs[i + 1].get());
+      }
+
+      return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(linestring, null));
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_LineString: " + e);
+      return null;
+    }
+  }
+
+  // constructor from arrays of X and Y coordinates
+  public BytesWritable evaluate(ArrayList<DoubleWritable> xs, ArrayList<DoubleWritable> ys)
+      throws UDFArgumentException {
+    if (null == xs || null == ys || xs.size() == 0 || ys.size() == 0 || xs.size() != ys.size()) {
+      return null;
+    }
+
+    try {
+      Polyline linestring = new Polyline();
+
+      for (int ix = 0; ix < xs.size(); ++ix) {
+        DoubleWritable xdw = xs.get(ix), ydw = ys.get(ix);
+        if (xdw == null || ydw == null) {
+          LogUtils.Log_ArgumentsNull(LOG);
+          return null;
+        }
+        if (ix == 0) {
+          linestring.startPath(xdw.get(), ydw.get());
+        } else {
+          linestring.lineTo(xdw.get(), ydw.get());
+        }
+      }
+
+      return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(linestring, null));
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_LineString: " + e);
+      return null;
+    }
+  }
+
+  // constructor from array of points
+  public BytesWritable evaluate(ArrayList<BytesWritable> points) throws UDFArgumentException {
+    if (null == points || points.size() == 0) {
+      return null;
+    }
+
+    try {
+      Polyline linestring = new Polyline();
+
+      for (int ix = 0; ix < points.size(); ++ix) {
+        BytesWritable geomref = points.get(ix);
+        OGCGeometry gcur = GeometryUtils.geometryFromEsriShape(geomref);
+        if (gcur == null || GeometryUtils.getType(geomref) != GeometryUtils.OGCType.ST_POINT) {
+          if (gcur == null)
+            LogUtils.Log_ArgumentsNull(LOG);
+          else
+            LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.getType(geomref));
+          return null;
+        }
+        if (ix == 0) {
+          linestring.startPath((Point) gcur.getEsriGeometry());
+        } else {
+          linestring.lineTo((Point) gcur.getEsriGeometry());
+        }
+      }
+
+      return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(linestring, null));
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_LineString: " + e);
+      return null;
+    }
+  }
+
+  // WKT constructor - can use SetSRID on constructed line string
+  public BytesWritable evaluate(Text wkwrap) throws UDFArgumentException {
+    String wkt = wkwrap.toString();
+    try {
+      OGCGeometry ogcObj = OGCGeometry.fromText(wkt);
+      ogcObj.setSpatialReference(null);
+      if (ogcObj.geometryType().equals("LineString")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LogUtils.Log_InvalidText(LOG, wkt);
+      return null;
+    }
+  }
+
+}
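
An illustrative call of the array-of-points overload above, with the expected output inferred from the constructor logic (the one-row source table is assumed):

    SELECT ST_AsText(ST_LineString(array(ST_Point(1.0, 1.0), ST_Point(2.0, 2.0), ST_Point(3.0, 3.0)))) FROM src LIMIT 1;  -- LINESTRING (1 1, 2 2, 3 3)
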
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_M.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_M.java
new file mode 100755
index 00000000000..ef176e937b0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_M.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_M",
+    value = "_FUNC_(geometry) - return true if the geometry object is three-dimensional",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_PointM(3., 4., 2)) FROM src LIMIT 1;  -- 2\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_M(ST_Point('point m(0. 3. 1)')) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_M(ST_Point('pointzm (0. 3. 1. 2.)')) from onerow",
+//			result = "2.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_M(ST_Point(0., 3.)) from onerow",
+//			result = "null"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_M(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_M extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_M.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      return null;
+    }
+    if (!ogcGeometry.isMeasured()) {
+      LogUtils.Log_NotMeasured(LOG);
+      return null;
+    }
+
+    switch (GeometryUtils.getType(geomref)) {
+    case ST_POINT:
+      OGCPoint pt = (OGCPoint) ogcGeometry;
+      resultDouble.set(pt.M());
+      return resultDouble;
+    default:
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MLineFromWKB.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MLineFromWKB.java
new file mode 100755
index 00000000000..01e5cd70d02
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MLineFromWKB.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+@Description(name = "ST_MLineFromWKB",
+    value = "_FUNC_(wkb) - construct an ST_MultiLineString from OGC well-known binary",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('multilinestring ((1 0, 2 3), (5 7, 7 5))'))) FROM src LIMIT 1;  -- constructs ST_MultiLineString\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_MLineFromWKB(ST_AsBinary(ST_GeomFromText('multilinestring ((1 2, 2 1),(10 10, 20 20))')))) from onerow",
+//			result = "ST_MULTILINESTRING"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_MLineFromWKB(ST_AsBinary(ST_GeomFromText('multilinestring ((1 2, 2 1),(10 10, 20 20))'))), ST_GeomFromText('multilinestring ((1 2, 2 1),(10 10, 20 20))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_MLineFromWKB extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_MLineFromWKB.class.getName());
+
+  public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
+    return evaluate(wkb, 0);
+  }
+
+  public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {
+
+    try {
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      byte[] byteArr = wkb.getBytes();
+      ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length);
+      byteBuf.put(byteArr);
+      OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
+      ogcObj.setSpatialReference(spatialReference);
+      String gType = ogcObj.geometryType();
+      if (gType.equals("MultiLineString") || gType.equals("LineString")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MPointFromWKB.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MPointFromWKB.java
new file mode 100755
index 00000000000..2fe92022bb3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MPointFromWKB.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+@Description(name = "ST_MPointFromWKB",
+    value = "_FUNC_(wkb) - construct an ST_MultiPoint from OGC well-known binary",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('multipoint ((1 0), (2 3))'))) FROM src LIMIT 1;  -- constructs ST_MultiPoint\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_MPointFromWKB(ST_AsBinary(ST_GeomFromText('multipoint ((10 10), (20 20))')))) from onerow",
+//			result = "ST_MULTIPOINT"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_MPointFromWKB(ST_AsBinary(ST_GeomFromText('multipoint ((10 10), (20 20))'))), ST_GeomFromText('multipoint ((10 10), (20 20))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_MPointFromWKB extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_MPointFromWKB.class.getName());
+
+  public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
+    return evaluate(wkb, 0);
+  }
+
+  public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {
+
+    try {
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      byte[] byteArr = wkb.getBytes();
+      ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length);
+      byteBuf.put(byteArr);
+      OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
+      ogcObj.setSpatialReference(spatialReference);
+      String gType = ogcObj.geometryType();
+      if (gType.equals("MultiPoint") || gType.equals("Point")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MPolyFromWKB.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MPolyFromWKB.java
new file mode 100755
index 00000000000..1092226df06
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MPolyFromWKB.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+@Description(name = "ST_MPolyFromWKB",
+    value = "_FUNC_(wkb) - construct an ST_MultiPolygon from OGC well-known binary",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))'))) FROM src LIMIT 1;  -- constructs ST_MultiPolygon\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_MPolyFromWKB(ST_AsBinary(ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))')))) from onerow",
+//			result = "ST_MULTIPOLYGON"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_MPolyFromWKB(ST_AsBinary(ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))'))), ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_MPolyFromWKB extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_MPolyFromWKB.class.getName());
+
+  public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
+    return evaluate(wkb, 0);
+  }
+
+  public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {
+
+    try {
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      byte[] byteArr = wkb.getBytes();
+      ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length);
+      byteBuf.put(byteArr);
+      OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
+      ogcObj.setSpatialReference(spatialReference);
+      String gType = ogcObj.geometryType();
+      if (gType.equals("MultiPolygon") || gType.equals("Polygon")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+
+}
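ST_MLineFromWKB, ST_MPointFromWKB and ST_MPolyFromWKB above share the same shape: copy the WKB bytes into a ByteBuffer, parse with OGCGeometry.fromBinary, attach a spatial reference when a non-zero wkid is given, and accept either the multi type or its single-geometry counterpart. A hedged usage sketch that round-trips through ST_AsBinary, mirroring the commented unit tests (assuming the functions are registered under these names):

  SELECT ST_GeometryType(ST_MPointFromWKB(ST_AsBinary(ST_GeomFromText('multipoint ((10 40), (40 30))')))) FROM src LIMIT 1;  -- ST_MULTIPOINT
  SELECT ST_GeometryType(ST_MLineFromWKB(ST_AsBinary(ST_GeomFromText('multilinestring ((1 2, 2 1), (10 10, 20 20))')))) FROM src LIMIT 1;  -- ST_MULTILINESTRING
  SELECT ST_GeometryType(ST_MPolyFromWKB(ST_AsBinary(ST_GeomFromText('multipolygon (((0 0, 1 0, 0 1, 0 0)), ((2 2, 1 2, 2 1, 2 2)))')))) FROM src LIMIT 1;  -- ST_MULTIPOLYGON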
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxM.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxM.java
new file mode 100755
index 00000000000..1964ca43723
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxM.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MaxM",
+    value = "_FUNC_(geometry) - returns the maximum M coordinate of geometry",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_PointM(1.5, 2.5, 2)) FROM src LIMIT 1;  -- 2\n"
+        + "  SELECT _FUNC_(ST_LineString('linestring m (1.5 2.5 2, 3.0 2.2 1)')) FROM src LIMIT 1;  -- 1\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxM(ST_PointM(0., 3., 1.)) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxM(ST_GeomFromText('linestring m (10 10 2, 20 20 4)')) from onerow",
+//			result = "4.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxM(ST_MultiPoint('multipoint m((0 0 1), (2 2 3))')) from onerow",
+//			result = "3.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxM(ST_Point(1,2)) from onerow",
+//			result = "null"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxM(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_MaxM extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_MaxM.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (!ogcGeometry.isMeasured()) {
+      LogUtils.Log_NotMeasured(LOG);
+      return null;
+    }
+
+    resultDouble.set(ogcGeometry.MaxMeasure());
+    return resultDouble;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxX.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxX.java
new file mode 100755
index 00000000000..e1000c2c8f4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxX.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Envelope;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MaxX",
+    value = "_FUNC_(geometry) - returns the maximum X coordinate of geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- 1.5\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- 3.0\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxX(ST_Point(1,2)) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxX(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow",
+//			result = "3.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxX(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow",
+//			result = "4"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxX(ST_MultiPoint(0,0, 2,2)) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxX(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow",
+//			result = "20"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxX(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow",
+//			result = "4"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxX(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_MaxX extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_MaxX.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Envelope envBound = new Envelope();
+    ogcGeometry.getEsriGeometry().queryEnvelope(envBound);
+    resultDouble.set(envBound.getXMax());
+    return resultDouble;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxY.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxY.java
new file mode 100755
index 00000000000..3ffb17e3034
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxY.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Envelope;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MaxY",
+    value = "_FUNC_(geometry) - returns the maximum Y coordinate of geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- 2.5\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- 2.5\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxY(ST_Point(1,2)) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxY(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow",
+//			result = "2.5"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxY(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow",
+//			result = "4"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxY(ST_MultiPoint(0,0, 4,2)) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxY(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 25, 20))) from onerow",
+//			result = "20"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxY(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow",
+//			result = "4"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxY(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_MaxY extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_MaxY.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Envelope envBound = new Envelope();
+    ogcGeometry.getEsriGeometry().queryEnvelope(envBound);
+    resultDouble.set(envBound.getYMax());
+    return resultDouble;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxZ.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxZ.java
new file mode 100755
index 00000000000..c1f526ce9b3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MaxZ.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MaxZ",
+    value = "_FUNC_(geometry) - returns the maximum Z coordinate of geometry",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_PointZ(1.5, 2.5, 2)) FROM src LIMIT 1;  -- 2\n"
+        + "  SELECT _FUNC_(ST_LineString('linestring z (1.5 2.5 2, 3.0 2.2 1)')) FROM src LIMIT 1;  -- 1\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxZ(ST_PointZ(0., 3., 1.)) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxZ(ST_GeomFromText('linestring z (10 10 2, 20 20 4)')) from onerow",
+//			result = "4.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxZ(ST_MultiPoint('multipoint z((0 0 1), (2 2 3))')) from onerow",
+//			result = "3.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxZ(ST_Point(1,2)) from onerow",
+//			result = "null"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MaxZ(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_MaxZ extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_MaxZ.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (!ogcGeometry.is3D()) {
+      LogUtils.Log_Not3D(LOG);
+      return null;
+    }
+
+    resultDouble.set(ogcGeometry.MaxZ());
+    return resultDouble;
+  }
+
+}
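ST_MaxM, ST_MaxX, ST_MaxY and ST_MaxZ above read the upper bound of a geometry along one dimension: X and Y are taken from the Esri envelope of the geometry, while Z and M return NULL unless the geometry is 3D or measured, respectively. A hedged sketch using the same inputs as the in-code examples:

  SELECT ST_MaxX(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- 3.0
  SELECT ST_MaxY(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- 2.5
  SELECT ST_MaxZ(ST_PointZ(1.5, 2.5, 2)) FROM src LIMIT 1;           -- 2
  SELECT ST_MaxM(ST_Point(1.5, 2.5)) FROM src LIMIT 1;               -- NULL (not measured)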
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinM.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinM.java
new file mode 100755
index 00000000000..a202754bfba
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinM.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MinM",
+    value = "_FUNC_(geometry) - returns the minimum M coordinate of geometry",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_PointM(1.5, 2.5, 2)) FROM src LIMIT 1;  -- 2\n"
+        + "  SELECT _FUNC_(ST_LineString('linestring m (1.5 2.5 2, 3.0 2.2 1)')) FROM src LIMIT 1;  -- 1\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_MinM(ST_PointM(0., 3., 1.)) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinM(ST_GeomFromText('linestring m (10 10 2, 20 20 4)')) from onerow",
+//			result = "2.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinM(ST_MultiPoint('multipoint m((0 0 1), (2 2 3))')) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinM(ST_Point(1,2)) from onerow",
+//			result = "null"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinM(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_MinM extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_MinM.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (!ogcGeometry.isMeasured()) {
+      LogUtils.Log_NotMeasured(LOG);
+      return null;
+    }
+
+    resultDouble.set(ogcGeometry.MinMeasure());
+    return resultDouble;
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinX.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinX.java
new file mode 100755
index 00000000000..10d86a77b64
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinX.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Envelope;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MinX",
+    value = "_FUNC_(geometry) - returns the minimum X coordinate of geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- 1.5\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- 3.0\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_MinX(ST_Point(1,2)) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinX(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow",
+//			result = "1.5"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinX(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinX(ST_MultiPoint(0,0, 2,2)) from onerow",
+//			result = "0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinX(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinX(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinX(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_MinX extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_MinX.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Envelope envBound = new Envelope();
+    ogcGeometry.getEsriGeometry().queryEnvelope(envBound);
+    resultDouble.set(envBound.getXMin());
+    return resultDouble;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinY.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinY.java
new file mode 100755
index 00000000000..ea90923f50f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinY.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Envelope;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MinY",
+    value = "_FUNC_(geometry) - returns the minimum Y coordinate of geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- 2.5\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- 2.2\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_MinY(ST_Point(1,2)) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinY(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow",
+//			result = "2.2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinY(ST_Polygon(1,1, 1,4, 4,4, 4,1)) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinY(ST_MultiPoint(0,0, 2,2)) from onerow",
+//			result = "0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinY(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinY(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinY(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_MinY extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_MinY.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Envelope envBound = new Envelope();
+    ogcGeometry.getEsriGeometry().queryEnvelope(envBound);
+    resultDouble.set(envBound.getYMin());
+    return resultDouble;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinZ.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinZ.java
new file mode 100755
index 00000000000..5fbebc4a7cb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MinZ.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MinZ",
+    value = "_FUNC_(geometry) - returns the minimum Z coordinate of geometry",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_PointZ(1.5, 2.5, 2)) FROM src LIMIT 1;  -- 2\n"
+        + "  SELECT _FUNC_(ST_LineString('linestring z (1.5 2.5 2, 3.0 2.2 1)')) FROM src LIMIT 1;  -- 1\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_MinZ(ST_PointZ(0., 3., 1.)) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinZ(ST_GeomFromText('linestring z (10 10 2, 20 20 4)')) from onerow",
+//			result = "2.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinZ(ST_MultiPoint('multipoint z((0 0 1), (2 2 3))')) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinZ(ST_Point(1,2)) from onerow",
+//			result = "null"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_MinZ(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_MinZ extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_MinZ.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (!ogcGeometry.is3D()) {
+      LogUtils.Log_Not3D(LOG);
+      return null;
+    }
+
+    resultDouble.set(ogcGeometry.MinZ());
+    return resultDouble;
+  }
+
+}
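The ST_MinM, ST_MinX, ST_MinY and ST_MinZ accessors mirror the ST_Max* ones above, returning the lower bound per dimension and NULL when the required Z or M dimension is absent. A hedged sketch, again following the in-code examples:

  SELECT ST_MinX(ST_MultiPoint(0,0, 2,2)) FROM src LIMIT 1;  -- 0
  SELECT ST_MinY(ST_MultiPoint(0,0, 2,2)) FROM src LIMIT 1;  -- 0
  SELECT ST_MinM(ST_PointM(0., 3., 1.)) FROM src LIMIT 1;    -- 1.0
  SELECT ST_MinZ(ST_Point(1, 2)) FROM src LIMIT 1;           -- NULL (not 3D)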
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiLineString.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiLineString.java
new file mode 100755
index 00000000000..0fc4ddcab6e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiLineString.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Polyline;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+@Description(name = "ST_MultiLineString",
+    value =
+        "_FUNC_(array(x1, y1, x2, y2, ... ), array(x1, y1, x2, y2, ... ), ... ) - constructor for 2D multi line string\n"
+            + "_FUNC_('multilinestring( ... )') - constructor for 2D multi line string",
+    extended = "Example:\n" + "  SELECT _FUNC_(array(1, 1, 2, 2), array(10, 10, 20, 20)) from src LIMIT 1;\n"
+        + "  SELECT _FUNC_('multilinestring ((1 1, 2 2), (10 10, 20 20))', 0) from src LIMIT 1;")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select st_asjson(ST_MultiLineString(1, 1, 2, 2, 3, 3)) from onerow",
+//			result = "{\"points\":[[1.0,1.0],[2.0,2.0],[3.0,3.0]]}"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_MultiLinestring('multilinestring ((2 4, 10 10), (20 20, 7 8))'), ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//)
+public class ST_MultiLineString extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_MultiLineString.class.getName());
+
+  // Number-pairs constructor
+  public BytesWritable evaluate(List<DoubleWritable>... multipaths) throws UDFArgumentLengthException {
+
+    if (multipaths == null || multipaths.length == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      Polyline mPolyline = new Polyline();
+
+      int arg_idx = 0;
+      for (List<DoubleWritable> multipath : multipaths) {
+        if (multipath.size() % 2 != 0) {
+          LogUtils.Log_VariableArgumentLengthXY(LOG, arg_idx);
+          return null;
+        }
+
+        mPolyline.startPath(multipath.get(0).get(), multipath.get(1).get());
+
+        for (int i = 2; i < multipath.size(); i += 2) {
+          mPolyline.lineTo(multipath.get(i).get(), multipath.get(i + 1).get());
+        }
+        arg_idx++;
+      }
+
+      return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(mPolyline, null, true));
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_MultiLineString: " + e);
+      return null;
+    }
+  }
+
+  // WKT constructor  -  can use SetSRID on constructed multi-linestring
+  public BytesWritable evaluate(Text wkwrap) throws UDFArgumentException {
+    String wkt = wkwrap.toString();
+    try {
+      OGCGeometry ogcObj = OGCGeometry.fromText(wkt);
+      ogcObj.setSpatialReference(null);
+      if (ogcObj.geometryType().equals("MultiLineString")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LogUtils.Log_InvalidText(LOG, wkt);
+      return null;
+    }
+  }
+
+}
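ST_MultiLineString above accepts either one array of x/y values per path (each array must have even length) or a multilinestring WKT literal; the array form assembles all paths into a single Polyline. A hedged usage sketch:

  SELECT ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20)) FROM src LIMIT 1;
  SELECT ST_GeometryType(ST_MultiLineString('multilinestring ((1 1, 2 2), (10 10, 20 20))')) FROM src LIMIT 1;  -- ST_MULTILINESTRING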
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiPoint.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiPoint.java
new file mode 100755
index 00000000000..3ede5750377
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiPoint.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.MultiPoint;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_MultiPoint",
+    value = "_FUNC_(x1, y1, x2, y2, x3, y3) - constructor for 2D multipoint\n"
+        + "_FUNC_('multipoint( ... )') - constructor for 2D multipoint",
+    extended = "Example:\n" + "  SELECT _FUNC_(1, 1, 2, 2, 3, 3) from src LIMIT 1; -- multipoint with 3 points\n"
+        + "  SELECT _FUNC_('MULTIPOINT ((10 40), (40 30))') from src LIMIT 1; -- multipoint of 2 points")
+//@HivePdkUnitTests(
+//	cases = { 
+//		@HivePdkUnitTest(
+//			query = "select st_asjson(st_multipoint(1, 1, 2, 2, 3, 3)) from onerow",
+//			result = "{\"points\":[[1.0,1.0],[2.0,2.0],[3.0,3.0]]}"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_MultiPoint('MULTIPOINT ((10 40), (40 30))')) from onerow",
+//			result = "ST_MULTIPOINT"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_MultiPoint('MULTIPOINT ((10 40), (40 30))'), ST_GeomFromText('MULTIPOINT ((10 40), (40 30))')) from onerow",
+//			result = "true"
+//			)
+//	}
+//)
+
+public class ST_MultiPoint extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_MultiPoint.class.getName());
+
+  // Number-pairs constructor
+  public BytesWritable evaluate(DoubleWritable... xyPairs) throws UDFArgumentLengthException {
+
+    if (xyPairs == null || xyPairs.length == 0 || xyPairs.length % 2 != 0) {
+      LogUtils.Log_VariableArgumentLengthXY(LOG);
+      return null;
+    }
+
+    try {
+      MultiPoint mPoint = new MultiPoint();
+
+      for (int i = 0; i < xyPairs.length; i += 2) {
+        mPoint.add(xyPairs[i].get(), xyPairs[i + 1].get());
+      }
+
+      return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(mPoint, null, true));
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_MultiPoint: " + e);
+      return null;
+    }
+  }
+
+  // WKT constructor - can use SetSRID on constructed multi-point
+  public BytesWritable evaluate(Text wkwrap) throws UDFArgumentException {
+    String wkt = wkwrap.toString();
+    try {
+      OGCGeometry ogcObj = OGCGeometry.fromText(wkt);
+      ogcObj.setSpatialReference(null);
+      if (ogcObj.geometryType().equals("MultiPoint")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LogUtils.Log_InvalidText(LOG, wkt);
+      return null;
+    }
+  }
+
+}
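ST_MultiPoint above takes a flat list of x/y values (which must come in pairs) or a MULTIPOINT WKT literal. A hedged usage sketch:

  SELECT ST_MultiPoint(1, 1, 2, 2, 3, 3) FROM src LIMIT 1;  -- multipoint with 3 points
  SELECT ST_GeometryType(ST_MultiPoint('MULTIPOINT ((10 40), (40 30))')) FROM src LIMIT 1;  -- ST_MULTIPOINT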
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiPolygon.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiPolygon.java
new file mode 100755
index 00000000000..8f981a3c402
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_MultiPolygon.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+@Description(name = "ST_MultiPolygon",
+    value =
+        "_FUNC_(array(x1, y1, x2, y2, ... ), array(x1, y1, x2, y2, ... ), ... ) - constructor for 2D multi polygon\n"
+            + "_FUNC_('multipolygon ( ... )') - constructor for 2D multi polygon",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(array(1, 1, 1, 2, 2, 2, 2, 1), array(3, 3, 3, 4, 4, 4, 4, 3)) from src LIMIT 1;\n"
+        + "  SELECT _FUNC_('multipolygon (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))') from src LIMIT 1;")
+//@HivePdkUnitTests(
+//	cases = { 
+//		@HivePdkUnitTest(
+//			query = "select st_asjson(st_multipolygon(array(1, 1, 1, 2, 2, 2, 2, 1), array(3, 3, 3, 4, 4, 4, 4, 3))) from onerow;",
+//			result = "{\"rings\":[[[1.0,1.0],[1.0,2.0],[2.0,2.0],[2.0,1.0],[1.0,1.0]],[[3.0,3.0],[3.0,4.0],[4.0,4.0],[4.0,3.0],[3.0,3.0]]]}"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_MultiPolygon('multipolygon (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))'), ST_GeomCollection('multipolygon (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//)
+public class ST_MultiPolygon extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_MultiPolygon.class.getName());
+
+  // Number-pairs constructor
+  public BytesWritable evaluate(List<DoubleWritable>... multipaths) throws UDFArgumentLengthException {
+
+    if (multipaths == null || multipaths.length == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      String wkt = "multipolygon(";
+      int arg_idx = 0;
+      String comma = "";  // comma except first time
+
+      for (List<DoubleWritable> multipath : multipaths) {
+        int len = multipath.size();
+        if (len < 6 || len % 2 != 0) {
+          LogUtils.Log_VariableArgumentLengthXY(LOG, arg_idx);
+          return null;
+        }
+
+        double xStart = multipath.get(0).get(), yStart = multipath.get(1).get();
+        wkt += comma + "((" + xStart + " " + yStart;
+
+        int ix;  // index persists after loop
+        for (ix = 2; ix < len; ix += 2) {
+          wkt += ", " + multipath.get(ix) + " " + multipath.get(ix + 1);
+        }
+        double xEnd = multipath.get(ix - 2).get(), yEnd = multipath.get(ix - 1).get();
+        // This counts on the same string getting parsed to double exactly equally
+        if (xEnd != xStart || yEnd != yStart)
+          wkt += ", " + xStart + " " + yStart;  // close the ring
+
+        wkt += "))";
+        comma = ",";
+        arg_idx++;
+      }
+      wkt += ")";
+
+      return evaluate(new Text(wkt));
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_MultiPolygon: " + e);
+      return null;
+    }
+  }
+
+  // WKT constructor - can use SetSRID on constructed multi-polygon
+  public BytesWritable evaluate(Text wkwrap) throws UDFArgumentException {
+    String wkt = wkwrap.toString();
+    try {
+      OGCGeometry ogcObj = OGCGeometry.fromText(wkt);
+      ogcObj.setSpatialReference(null);
+      if (ogcObj.geometryType().equals("MultiPolygon")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LogUtils.Log_InvalidText(LOG, wkt);
+      return null;
+    }
+  }
+
+}
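Unlike the other constructors, the array form of ST_MultiPolygon builds a multipolygon WKT string internally and appends the first vertex again when a ring is supplied open, so rings may be given open or closed; the result is then parsed by the WKT overload. A hedged usage sketch:

  SELECT ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3)) FROM src LIMIT 1;
  SELECT ST_GeometryType(ST_MultiPolygon('multipolygon (((0 0, 0 1, 1 0, 0 0)), ((2 2, 2 3, 3 2, 2 2)))')) FROM src LIMIT 1;  -- ST_MULTIPOLYGON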
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumGeometries.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumGeometries.java
new file mode 100755
index 00000000000..cfca1b5294a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumGeometries.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCMultiLineString;
+import com.esri.core.geometry.ogc.OGCMultiPoint;
+import com.esri.core.geometry.ogc.OGCMultiPolygon;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_NumGeometries",
+    value = "_FUNC_(ST_GeometryCollection) - return the number of geometries in the geometry collection",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))')) FROM src LIMIT 1;  -- 4\n"
+        + "  SELECT _FUNC_(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) FROM src LIMIT 1;  -- 2\n")
+
+public class ST_NumGeometries extends ST_GeometryAccessor {
+  final IntWritable resultInt = new IntWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_NumGeometries.class.getName());
+
+  public IntWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      GeometryUtils.OGCType ogcType = GeometryUtils.getType(geomref);
+      switch (ogcType) {
+      case ST_POINT:
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, ogcType);
+        return null;
+      case ST_LINESTRING:
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, ogcType);
+        return null;
+      case ST_POLYGON:
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, ogcType);
+        return null;
+      case ST_MULTIPOINT:
+        resultInt.set(((OGCMultiPoint) ogcGeometry).numGeometries());
+        break;
+      case ST_MULTILINESTRING:
+        resultInt.set(((OGCMultiLineString) ogcGeometry).numGeometries());
+        break;
+      case ST_MULTIPOLYGON:
+        resultInt.set(((OGCMultiPolygon) ogcGeometry).numGeometries());
+        break;
+      }
+    } catch (ClassCastException cce) {  // single vs Multi geometry type
+      resultInt.set(1);
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_NumGeometries: " + e);
+      return null;
+    }
+    return resultInt;
+  }
+
+}
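ST_NumGeometries above counts the members of a multi geometry; plain points, linestrings and polygons are logged as an invalid type and return NULL, and the ClassCastException branch covers the case where the stored type tag says multi but the runtime object is the single variant, which counts as 1. A hedged usage sketch:

  SELECT ST_NumGeometries(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))')) FROM src LIMIT 1;  -- 4
  SELECT ST_NumGeometries(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) FROM src LIMIT 1;  -- 2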
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumInteriorRing.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumInteriorRing.java
new file mode 100755
index 00000000000..01fb8264cea
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumInteriorRing.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCPolygon;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_NumInteriorRing",
+    value = "_FUNC_(ST_Polygon) - return the number of interior rings in the polygon",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Polygon(1,1, 1,4, 4,1)) FROM src LIMIT 1;  -- 0\n"
+        + "  SELECT _FUNC_(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')) FROM src LIMIT 1;  -- 1\n")
+
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_NumInteriorRing(ST_Polygon('polygon ((1 1, 4 1, 1 4))')) from onerow",
+//			result = "0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_NumInteriorRing(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_NumInteriorRing(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_NumInteriorRing extends ST_GeometryAccessor {
+  static final Logger LOG = LoggerFactory.getLogger(ST_NumInteriorRing.class.getName());
+  final IntWritable resultInt = new IntWritable();
+
+  public IntWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_POLYGON) {
+      try {
+        resultInt.set(((OGCPolygon) (ogcGeometry)).numInteriorRing());
+        return resultInt;
+      } catch (Exception e) {
+        LogUtils.Log_InternalError(LOG, "ST_NumInteriorRing: " + e);
+        return null;
+      }
+    } else {
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POLYGON, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumPoints.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumPoints.java
new file mode 100755
index 00000000000..2ec95c090cb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_NumPoints.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.MultiPath;
+import com.esri.core.geometry.MultiPoint;
+import com.esri.core.geometry.Polygon;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_NumPoints",
+    value = "_FUNC_(geometry) - return the number of points in the geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  -- 1\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- 2\n"
+        + "  > SELECT _FUNC_(ST_GeomFromText('polygon ((0 0, 10 0, 0 10, 0 0))')) FROM src LIMIT 1;  -- 4\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_NumPoints(ST_Point(0., 3.)) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_NumPoints(ST_LineString(0.,0., 3.,4.)) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_NumPoints(ST_GeomFromText('polygon ((0 0, 10 0, 0 10, 0 0))')) from onerow",
+//			result = "4"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_NumPoints(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))', 0)) from onerow",
+//			result = "4"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_NumPoints(ST_GeomFromText('multilinestring ((2 4, 10 10), (20 20, 7 8))')) from onerow",
+//			result = "4"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_NumPoints(ST_Point('point empty')) from onerow",
+//			result = "0"
+//			)
+//		}
+//	)
+
+public class ST_NumPoints extends ST_GeometryAccessor {
+  final IntWritable resultInt = new IntWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_NumPoints.class.getName());
+
+  public IntWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Geometry esriGeom = ogcGeometry.getEsriGeometry();
+    switch (esriGeom.getType()) {
+    case Point:
+      resultInt.set(esriGeom.isEmpty() ? 0 : 1);
+      break;
+    case MultiPoint:
+      resultInt.set(((MultiPoint) (esriGeom)).getPointCount());
+      break;
+    case Polygon:
+      Polygon polygon = (Polygon) (esriGeom);
+      resultInt.set(polygon.getPointCount() + polygon.getPathCount());
+      break;
+    default:
+      resultInt.set(((MultiPath) (esriGeom)).getPointCount());
+      break;
+    }
+    return resultInt;
+  }
+}
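Note that for polygons the result is getPointCount() plus getPathCount(), so each ring contributes its closing vertex as well. A usage sketch under the same assumptions as the commented-out tests (registered UDFs and a one-row table onerow):

    SELECT ST_NumPoints(ST_GeomFromText('polygon ((0 0, 10 0, 0 10, 0 0))')) FROM onerow;  -- expected: 4
    SELECT ST_NumPoints(ST_Point('point empty')) FROM onerow;                              -- expected: 0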
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Overlaps.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Overlaps.java
new file mode 100755
index 00000000000..0810f0cd595
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Overlaps.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.OperatorOverlaps;
+import com.esri.core.geometry.OperatorSimpleRelation;
+import org.apache.hadoop.hive.ql.exec.Description;
+
+@Description(name = "ST_Overlaps",
+    value = "_FUNC_(geometry1, geometry2) - return true if geometry1 overlaps geometry2",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(st_polygon(2,0, 2,3, 3,0), st_polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(st_polygon(2,0, 2,1, 3,1), ST_Polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1;  -- return false")
+
+public class ST_Overlaps extends ST_GeometryRelational {
+
+  @Override
+  protected OperatorSimpleRelation getRelationOperator() {
+    return OperatorOverlaps.local();
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    return String.format("returns true if %s overlaps %s", args[0], args[1]);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Point.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Point.java
new file mode 100755
index 00000000000..75969e38eca
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Point.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Point;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Point",
+    value = "_FUNC_(x, y) - constructor for 2D point\n" + "_FUNC_('point (x y)') - constructor for 2D point",
+    extended = "Example:\n" + "  SELECT _FUNC_(longitude, latitude) from src LIMIT 1;\n"
+        + "  SELECT _FUNC_('point (0 0)') from src LIMIT 1;")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_Point('point (10.02 20.01)')) from onerow",
+//			result = "ST_POINT"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_Point('point (10.02 20.01)'), ST_GeomFromText('point (10.02 20.01)')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_Point extends ST_Geometry {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Point.class.getName());
+
+  // Number-pair constructor - 2D
+  public BytesWritable evaluate(DoubleWritable x, DoubleWritable y) {
+    return evaluate(x, y, null, null);
+  }
+
+  // Number-triplet constructor - 3D
+  public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z) {
+    return evaluate(x, y, z, null);
+  }
+
+  // Number-list constructor - ZM
+  public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z, DoubleWritable m) {
+    if (x == null || y == null) {
+      //LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    try {
+      Point stPt = new Point(x.get(), y.get());
+      if (z != null)
+        stPt.setZ(z.get());
+      if (m != null)
+        stPt.setM(m.get());
+      BytesWritable ret =
+          GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(stPt, null));
+      return ret;
+    } catch (Exception e) {
+      //LogUtils.Log_InternalError(LOG, "ST_Point: " + e);
+      return null;
+    }
+  }
+
+  // WKT constructor - can use SetSRID on constructed point
+  public BytesWritable evaluate(Text wkwrap) throws UDFArgumentException {
+    if (wkwrap == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    String wkt = wkwrap.toString();
+    try {
+      OGCGeometry ogcObj = OGCGeometry.fromText(wkt);
+      ogcObj.setSpatialReference(null);
+      if (ogcObj.geometryType().equals("Point")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LogUtils.Log_InvalidText(LOG, wkt);
+      return null;
+    }
+  }
+}
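The WKT constructor deliberately leaves the spatial reference unset, so an SRID can be attached afterwards with ST_SetSRID. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_SRID(ST_SetSRID(ST_Point('point (1.1 2.2)'), 4326)) FROM onerow;              -- expected: 4326
    SELECT ST_Equals(ST_Point(10.02, 20.01), ST_Point('point (10.02 20.01)')) FROM onerow;  -- expected: true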
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointFromWKB.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointFromWKB.java
new file mode 100755
index 00000000000..5bd428d0cbc
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointFromWKB.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+@Description(name = "ST_PointFromWKB",
+    value = "_FUNC_(wkb) - construct an ST_Point from OGC well-known binary",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('point (1 0))'))) FROM src LIMIT 1;  -- constructs ST_Point\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_PointFromWKB(ST_AsBinary(ST_GeomFromText('point (10 10)')))) from onerow",
+//			result = "ST_POINT"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_PointFromWKB(ST_AsBinary(ST_GeomFromText('point (10 10)'))), ST_GeomFromText('point (10 10)')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_PointFromWKB extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_PointFromWKB.class.getName());
+
+  public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
+    return evaluate(wkb, 0);
+  }
+
+  public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {
+
+    try {
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      byte[] byteArr = wkb.getBytes();
+      ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length);
+      byteBuf.put(byteArr);
+      OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
+      ogcObj.setSpatialReference(spatialReference);
+      if (ogcObj.geometryType().equals("Point")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+
+}
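ST_PointFromWKB also accepts an optional second argument, the well-known spatial-reference id to tag the result with. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_GeometryType(ST_PointFromWKB(ST_AsBinary(ST_GeomFromText('point (10 10)')))) FROM onerow;  -- expected: ST_POINT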
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointN.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointN.java
new file mode 100755
index 00000000000..7300fab3cde
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointN.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.MultiPath;
+import com.esri.core.geometry.MultiPoint;
+import com.esri.core.geometry.Point;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_PointN",
+    value = "_FUNC_(ST_Geometry, n) - returns the point that is the nth vertex in an ST_Linestring or ST_MultiPoint (1-based index)",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2), 2) FROM src LIMIT 1;  -- POINT(3.0 2.2)\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_X(ST_PointN(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))', 0), 2)) from onerow",
+//			result = "40"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Y(ST_PointN(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))', 0), 2)) from onerow",
+//			result = "30"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_X(ST_PointN(ST_GeomFromtext('linestring (10.02 20.01, 10.32 23.98, 11.92 25.64)'), 1)) from onerow",
+//			result = "10.02"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Y(ST_PointN(ST_GeomFromtext('linestring (10.02 20.01, 10.32 23.98, 11.92 25.64)'), 1)) from onerow",
+//			result = "20.01"
+//			),
+//		@HivePdkUnitTest(
+//			query = "ST_PointN(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))', 0), 5) from onerow",
+//			result = "null"
+//			)
+//		}
+//	)
+
+public class ST_PointN extends ST_GeometryAccessor {
+  static final Logger LOG = LoggerFactory.getLogger(ST_PointN.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref, IntWritable index) {
+    if (geomref == null || geomref.getLength() == 0 || index == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    Geometry esriGeom = ogcGeometry.getEsriGeometry();
+    Point pn = null;
+    int idx = index.get();
+    idx = (idx == 0) ? 0 : idx - 1;  // consistency with SDE ST_Geometry
+    switch (esriGeom.getType()) {
+    case Line:
+    case Polyline:
+      MultiPath lines = (MultiPath) (esriGeom);
+      try {
+        pn = lines.getPoint(idx);
+      } catch (Exception e) {
+        LogUtils.Log_InvalidIndex(LOG, idx + 1, 1, lines.getPointCount());
+        return null;
+      }
+      break;
+    case MultiPoint:
+      MultiPoint mp = (MultiPoint) (esriGeom);
+      try {
+        pn = mp.getPoint(idx);
+      } catch (Exception e) {
+        LogUtils.Log_InvalidIndex(LOG, idx + 1, 1, mp.getPointCount());
+        return null;
+      }
+      break;
+    default:  // ST_Geometry ST_PointN gives ERROR on Point or Polygon (on PostgreSQL)
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.getType(geomref));
+      return null;
+    }
+    return GeometryUtils
+        .geometryToEsriShapeBytesWritable(pn, GeometryUtils.getWKID(geomref), GeometryUtils.OGCType.ST_POINT);
+  }
+}
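ST_PointN uses 1-based indexing, and an index past the last vertex logs an invalid-index message and yields NULL. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_X(ST_PointN(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))', 0), 2)) FROM onerow;  -- expected: 40
    SELECT ST_PointN(ST_GeomFromText('multipoint ((10 40), (40 30), (20 20), (30 10))', 0), 5) FROM onerow;        -- expected: NULL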
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointZ.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointZ.java
new file mode 100644
index 00000000000..5018a2609ab
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PointZ.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Point;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+
+@Description(name = "ST_PointZ",
+    value = "_FUNC_(x, y, z) - constructor for 3D point",
+    extended = "Example:\n" + "SELECT _FUNC_(longitude, latitude, elevation) from src LIMIT 1;") public class ST_PointZ
+    extends ST_Geometry {
+
+  public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z) {
+    return evaluate(x, y, z, null);
+  }
+
+  // ZM
+  public BytesWritable evaluate(DoubleWritable x, DoubleWritable y, DoubleWritable z, DoubleWritable m) {
+    if (x == null || y == null || z == null) {
+      return null;
+    }
+    Point stPt = new Point(x.get(), y.get(), z.get());
+    if (m != null)
+      stPt.setM(m.get());
+    return GeometryUtils.geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(stPt, null));
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PolyFromWKB.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PolyFromWKB.java
new file mode 100755
index 00000000000..6c9f4ef68ff
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_PolyFromWKB.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+
+@Description(name = "ST_PolyFromWKB",
+    value = "_FUNC_(wkb) - construct an ST_Polygon from OGC well-known binary",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(ST_AsBinary(ST_GeomFromText('polygon ((0 0, 10 0, 0 10, 0 0))'))) FROM src LIMIT 1;  -- constructs ST_Polygon\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_PolyFromWKB(ST_AsBinary(ST_GeomFromText('polygon ((0 0, 1 0, 0 1, 0 0))')))) from onerow",
+//			result = "ST_POLYGON"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_PolyFromWKB(ST_AsBinary(ST_GeomFromText('polygon ((0 0, 1 0, 0 1, 0 0))'))), ST_GeomFromText('polygon ((0 0, 1 0, 0 1, 0 0))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_PolyFromWKB extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_PolyFromWKB.class.getName());
+
+  public BytesWritable evaluate(BytesWritable wkb) throws UDFArgumentException {
+    return evaluate(wkb, 0);
+  }
+
+  public BytesWritable evaluate(BytesWritable wkb, int wkid) throws UDFArgumentException {
+
+    try {
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      byte[] byteArr = wkb.getBytes();
+      ByteBuffer byteBuf = ByteBuffer.allocate(byteArr.length);
+      byteBuf.put(byteArr);
+      OGCGeometry ogcObj = OGCGeometry.fromBinary(byteBuf);
+      ogcObj.setSpatialReference(spatialReference);
+      if (ogcObj.geometryType().equals("Polygon")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POLYGON, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LOG.error(e.getMessage());
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Polygon.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Polygon.java
new file mode 100755
index 00000000000..8cf407e3176
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Polygon.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Polygon",
+    value = "_FUNC_(x, y, [x, y]*) - constructor for 2D polygon\n"
+        + "_FUNC_('polygon( ... )') - constructor for 2D polygon",
+    extended = "Example:\n" + "  SELECT _FUNC_(1, 1, 1, 4, 4, 4, 4, 1) from src LIMIT 1;  -- creates a rectangle\n"
+        + "  SELECT _FUNC_('polygon ((1 1, 4 1, 1 4))') from src LIMIT 1;  -- creates a triangle")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_Polygon('polygon ((0 0, 10 0, 0 10, 0 0))')) from onerow",
+//			result = "ST_POLYGON"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_GeometryType(ST_Polygon('polygon ((0 0, 8 0, 0 8, 0 0), (1 1, 1 5, 5 1, 1 1))')) from onerow",
+//			result = "ST_POLYGON"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Equals(ST_Polygon('polygon ((0 0, 10 0, 0 10, 0 0))'), ST_GeomFromText('polygon ((0 0, 10 0, 0 10, 0 0))')) from onerow",
+//			result = "true"
+//			)
+//		}
+//	)
+
+public class ST_Polygon extends ST_Geometry {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_Polygon.class.getName());
+
+  // Number-pairs constructor
+  public BytesWritable evaluate(DoubleWritable... xyPairs) throws UDFArgumentLengthException {
+
+    if (xyPairs == null || xyPairs.length < 6 || xyPairs.length % 2 != 0) {
+      LogUtils.Log_VariableArgumentLengthXY(LOG);
+      return null;
+    }
+
+    try {
+      double xStart = xyPairs[0].get(), yStart = xyPairs[1].get();
+      String wkt = "polygon((" + xStart + " " + yStart;
+
+      int i; // index persists after first loop
+      for (i = 2; i < xyPairs.length; i += 2) {
+        wkt += ", " + xyPairs[i] + " " + xyPairs[i + 1];
+      }
+      double xEnd = xyPairs[i - 2].get(), yEnd = xyPairs[i - 1].get();
+      // This counts on the same string getting parsed to double exactly equally
+      if (xEnd != xStart || yEnd != yStart)
+        wkt += ", " + xStart + " " + yStart;  // close the ring
+
+      wkt += "))";
+
+      return evaluate(new Text(wkt));
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_Polygon: " + e);
+      return null;
+    }
+  }
+
+  // WKT constructor - can use SetSRID on constructed polygon
+  public BytesWritable evaluate(Text wkwrap) throws UDFArgumentException {
+    if (wkwrap == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    String wkt = wkwrap.toString();
+    try {
+      OGCGeometry ogcObj = OGCGeometry.fromText(wkt);
+      ogcObj.setSpatialReference(null);
+      if (ogcObj.geometryType().equals("Polygon")) {
+        return GeometryUtils.geometryToEsriShapeBytesWritable(ogcObj);
+      } else {
+        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POLYGON, GeometryUtils.OGCType.UNKNOWN);
+        return null;
+      }
+    } catch (Exception e) {  // IllegalArgumentException, GeometryException
+      LogUtils.Log_InvalidText(LOG, wkt);
+      return null;
+    }
+  }
+}
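The number-pairs constructor closes the ring automatically when the last coordinate pair differs from the first, so a rectangle needs only its four corners. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_Equals(ST_Polygon(1,1, 1,4, 4,4, 4,1), ST_GeomFromText('polygon ((1 1, 1 4, 4 4, 4 1, 1 1))')) FROM onerow;  -- expected: true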
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Relate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Relate.java
new file mode 100755
index 00000000000..0c2ac0d1a6a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Relate.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Relate",
+    value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return true if ST_Geometry1 has the specified DE-9IM relationship with ST_Geometry2",
+    extended = "Example:\n"
+        + "  SELECT _FUNC_(st_polygon(2,0, 2,1, 3,1), ST_Polygon(1,1, 1,4, 4,4, 4,1), '****T****') from src LIMIT 1;  -- true\n"
+        + "  SELECT _FUNC_(st_polygon(2,0, 2,1, 3,1), ST_Polygon(1,1, 1,4, 4,4, 4,1), 'T********') from src LIMIT 1;  -- false\n"
+        + "  SELECT _FUNC_(st_linestring(0,0, 3,3), ST_linestring(1,1, 4,4), 'T********') from src LIMIT 1;  -- true\n"
+        + "  SELECT _FUNC_(st_linestring(0,0, 3,3), ST_linestring(1,1, 4,4), '****T****') from src LIMIT 1;  -- false\n")
+
+public class ST_Relate extends ST_Geometry {
+
+  final BooleanWritable resultBoolean = new BooleanWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_Relate.class.getName());
+
+  public BooleanWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2, String relation) {
+    if (geometryref1 == null || geometryref2 == null || relation == null || geometryref1.getLength() == 0
+        || geometryref2.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+    if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) {
+      LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2);
+      return null;
+    }
+
+    OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1);
+    OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2);
+    if (ogcGeom1 == null || ogcGeom2 == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      resultBoolean.set(ogcGeom1.relate(ogcGeom2, relation));
+      return resultBoolean;
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_Relate: " + e);
+      return null;
+    }
+  }
+
+}
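The third argument is a DE-9IM pattern: nine characters covering the interior/boundary/exterior intersection matrix, where 'T' requires an intersection, 'F' forbids one, and '*' is a don't-care. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_Relate(ST_Polygon(2,0, 2,1, 3,1), ST_Polygon(1,1, 1,4, 4,4, 4,1), '****T****') FROM onerow;  -- expected: true (the boundaries intersect)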
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SRID.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SRID.java
new file mode 100755
index 00000000000..9781cb3d06f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SRID.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_SRID",
+    value = "_FUNC_(ST_Geometry) - get the Spatial Reference ID of the geometry",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1  -- returns SRID 0")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_SRID(ST_SetSRID(ST_Point(1.1, 2.2), 4326)) FROM onerow",
+//			result = "4326"
+//		)
+//	}
+//)
+
+public class ST_SRID extends ST_GeometryAccessor {
+  static final Logger LOG = LoggerFactory.getLogger(ST_SRID.class.getName());
+
+  IntWritable resultInt = new IntWritable();
+
+  public IntWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    resultInt.set(GeometryUtils.getWKID(geomref));
+    return resultInt;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SetSRID.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SetSRID.java
new file mode 100644
index 00000000000..e7ffd30d826
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SetSRID.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_SetSRID",
+    value = "_FUNC_(<ST_Geometry>, SRID) - set the Spatial Reference ID of the geometry",
+    extended = "Example:\n" + "  > SELECT _FUNC_(ST_SetSRID(ST_Point(1.5, 2.5), 4326)) FROM src LIMIT 1;\n"
+        + "  -- create a point and then set its SRID to 4326")
+
+public class ST_SetSRID extends ST_Geometry {
+  static final Logger LOG = LoggerFactory.getLogger(ST_SetSRID.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geomref, IntWritable wkwrap) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    // just return the geometry ref without setting anything if wkid is null
+    if (wkwrap == null) {
+      return geomref;
+    }
+
+    int wkid = wkwrap.get();
+    if (GeometryUtils.getWKID(geomref) != wkid) {
+      GeometryUtils.setWKID(geomref, wkid);
+    }
+
+    return geomref;
+  }
+}
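ST_SetSRID rewrites the WKID stored in the serialized shape and hands back the same reference; a NULL SRID argument simply returns the geometry untouched. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_SRID(ST_SetSRID(ST_Point(1.1, 2.2), 4326)) FROM onerow;  -- expected: 4326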
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_StartPoint.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_StartPoint.java
new file mode 100644
index 00000000000..8d876dfb2cd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_StartPoint.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.MultiPath;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_StartPoint",
+    value = "_FUNC_(geometry) - returns the first point of an ST_Linestring",
+    extended = "Example:\n"
+        + "  > SELECT _FUNC_(ST_LineString(1.5,2.5, 3.0,2.2)) FROM src LIMIT 1;  -- POINT(1.5 2.5)\n")
+
+public class ST_StartPoint extends ST_GeometryAccessor {
+  static final Logger LOG = LoggerFactory.getLogger(ST_StartPoint.class.getName());
+
+  /**
+   * Return the first point of the ST_Linestring.
+   * @param geomref hive geometry bytes
+   * @return byte-reference of the first ST_Point
+   */
+  public BytesWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    if (GeometryUtils.getType(geomref) == GeometryUtils.OGCType.ST_LINESTRING) {
+      MultiPath lines = (MultiPath) (ogcGeometry.getEsriGeometry());
+      int wkid = GeometryUtils.getWKID(geomref);
+      SpatialReference spatialReference = null;
+      if (wkid != GeometryUtils.WKID_UNKNOWN) {
+        spatialReference = SpatialReference.create(wkid);
+      }
+      return GeometryUtils
+          .geometryToEsriShapeBytesWritable(OGCGeometry.createFromEsriGeometry(lines.getPoint(0), spatialReference));
+    } else {
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_LINESTRING, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+}
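ST_StartPoint only accepts linestrings; any other geometry type logs an invalid-type message and yields NULL. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_AsText(ST_StartPoint(ST_LineString(1.5,2.5, 3.0,2.2))) FROM onerow;  -- expected: POINT (1.5 2.5)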
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SymmetricDiff.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SymmetricDiff.java
new file mode 100755
index 00000000000..3d72b5180fd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_SymmetricDiff.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_SymmetricDiff",
+    value = "_FUNC_(ST_Geometry1, ST_Geometry2) - return the symmetric difference between ST_Geometry1 & ST_Geometry2",
+    extended = "Examples:\n"
+        + " > SELECT ST_AsText(_FUNC_(ST_LineString('linestring(0 2, 2 2)'), ST_LineString('linestring(1 2, 3 2)'))) FROM onerow; \n"
+        + " MULTILINESTRING((0 2, 1 2), (2 2, 3 2))\n"
+        + " > SELECT ST_AsText(_FUNC_(ST_SymmetricDiff(ST_Polygon('polygon((0 0, 2 0, 2 2, 0 2, 0 0))'), ST_Polygon('polygon((1 1, 3 1, 3 3, 1 3, 1 1))'))) from onerow;\n"
+        + " MULTIPOLYGON (((0 0, 2 0, 2 1, 1 1, 1 2, 0 2, 0 0)), ((3 1, 3 3, 1 3, 1 2, 2 2, 2 1, 3 1)))\n")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_Equals(ST_SymmetricDiff(ST_LineString('linestring(0 2, 2 2)'), ST_LineString('linestring(1 2, 3 2)')), ST_GeomFromText('multilinestring((0 2, 1 2), (2 2, 3 2))')) FROM onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_Equals(ST_SymmetricDiff(ST_Polygon('polygon((0 0, 2 0, 2 2, 0 2, 0 0))'), ST_Polygon('polygon((1 1, 3 1, 3 3, 1 3, 1 1))')), ST_MultiPolygon('multipolygon(((0 0, 2 0, 2 1, 1 1, 1 2, 0 2, 0 0)), ((3 1, 3 3, 1 3, 1 2, 2 2, 2 1, 3 1)))')) FROM onerow",
+//			result = "true"
+//			),
+//		@HivePdkUnitTest(
+//			query = "SELECT ST_SymmetricDiff(ST_Point(0,0), null) from onerow",
+//			result = "null"
+//			)
+//		}
+//	)
+
+public class ST_SymmetricDiff extends ST_GeometryProcessing {
+
+  static final Logger LOG = LoggerFactory.getLogger(ST_SymmetricDiff.class.getName());
+
+  public BytesWritable evaluate(BytesWritable geometryref1, BytesWritable geometryref2) {
+    if (geometryref1 == null || geometryref2 == null || geometryref1.getLength() == 0
+        || geometryref2.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    if (!GeometryUtils.compareSpatialReferences(geometryref1, geometryref2)) {
+      LogUtils.Log_SRIDMismatch(LOG, geometryref1, geometryref2);
+      return null;
+    }
+
+    OGCGeometry ogcGeom1 = GeometryUtils.geometryFromEsriShape(geometryref1);
+    OGCGeometry ogcGeom2 = GeometryUtils.geometryFromEsriShape(geometryref2);
+    if (ogcGeom1 == null || ogcGeom2 == null) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    try {
+      OGCGeometry diffGeometry = ogcGeom1.symDifference(ogcGeom2);
+      return GeometryUtils.geometryToEsriShapeBytesWritable(diffGeometry);
+    } catch (Exception e) {
+      LogUtils.Log_InternalError(LOG, "ST_SymmetricDiff: " + e);
+      return null;
+    }
+  }
+
+}
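As with the other binary operations, both inputs must carry the same SRID or the call logs a mismatch and yields NULL. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_AsText(ST_SymmetricDiff(ST_LineString('linestring(0 2, 2 2)'), ST_LineString('linestring(1 2, 3 2)'))) FROM onerow;
    -- expected: MULTILINESTRING((0 2, 1 2), (2 2, 3 2))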
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Touches.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Touches.java
new file mode 100755
index 00000000000..7881c48430d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Touches.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.OperatorSimpleRelation;
+import com.esri.core.geometry.OperatorTouches;
+import org.apache.hadoop.hive.ql.exec.Description;
+
+@Description(name = "ST_Touches",
+    value = "_FUNC_(geometry1, geometry2) - return true if geometry1 touches geometry2",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(st_point(1, 2), st_polygon(1, 1, 1, 4, 4, 4, 4, 1)) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(st_point(8, 8), st_polygon(1, 1, 1, 4, 4, 4, 4, 1)) from src LIMIT 1;  -- return false")
+
+public class ST_Touches extends ST_GeometryRelational {
+
+  @Override
+  protected OperatorSimpleRelation getRelationOperator() {
+    return OperatorTouches.local();
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    return String.format("returns true if %s touches %s", args[0], args[1]);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Union.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Union.java
new file mode 100755
index 00000000000..84788188440
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Union.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.udf.esri.GeometryUtils.OGCType;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Union",
+    value = "_FUNC_(ST_Geometry, ST_Geometry, ...) - returns an ST_Geometry as the union of the supplied ST_Geometries",
+    extended =
+        "Example: SELECT ST_AsText(ST_Union(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1), ST_Polygon(4, 1, 4, 4, 4, 8, 8, 1))) FROM onerow;\n"
+            + "MULTIPOLYGON (((4 1, 8 1, 4 8, 4 4, 1 4, 1 1, 4 1)))")
+//@HivePdkUnitTests(
+//		cases = {
+//				@HivePdkUnitTest(
+//						query = "SELECT ST_AsText(ST_Union(ST_Point(1.1, 2.2), ST_Point(3.3, 4.4))) FROM onerow",
+//						result = "MULTIPOINT (1.1 2.2, 3.3 4.4)"
+//						),
+//				@HivePdkUnitTest(
+//						query = "SELECT ST_AsText(ST_Union(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1), ST_Polygon(4,1, 4,8, 8,1))) FROM onerow",
+//						result = "MULTIPOLYGON (((4 1, 8 1, 4 8, 4 4, 1 4, 1 1, 4 1)))"
+//						),
+//				@HivePdkUnitTest(
+//						query = "SELECT ST_AsText(ST_Union(ST_Point(1.1, 2.2), ST_Point(3.3, 4.4), ST_Point(5.5, 6.6), ST_Point(1.1, 2.2))) FROM onerow",
+//						result = "MULTIPOINT (1.1 2.2, 3.3 4.4, 5.5 6.6)"
+//						)
+//			}
+//		)
+public class ST_Union extends ST_GeometryProcessing {
+  static final Logger LOG = LoggerFactory.getLogger(ST_Union.class.getName());
+
+  public BytesWritable evaluate(BytesWritable... geomrefs) {
+    // validate arguments
+    if (geomrefs == null || geomrefs.length < 2) {
+      LogUtils.Log_VariableArgumentLength(LOG);
+      return null;
+    }
+
+    int firstWKID = 0;
+
+    SpatialReference spatialRef = null;
+
+    // validate spatial references and geometries first
+    for (int i = 0; i < geomrefs.length; i++) {
+
+      BytesWritable geomref = geomrefs[i];
+
+      if (geomref == null || geomref.getLength() == 0) {
+        LogUtils.Log_ArgumentsNull(LOG);
+        return null;
+      }
+
+      if (i == 0) {
+        firstWKID = GeometryUtils.getWKID(geomref);
+        if (firstWKID != GeometryUtils.WKID_UNKNOWN) {
+          spatialRef = SpatialReference.create(firstWKID);
+        }
+      } else if (firstWKID != GeometryUtils.getWKID(geomref)) {
+        LogUtils.Log_SRIDMismatch(LOG, geomrefs[0], geomref);
+        return null;
+      }
+    }
+
+    // now build geometry array to pass to GeometryEngine.union
+    Geometry[] geomsToUnion = new Geometry[geomrefs.length];
+
+    for (int i = 0; i < geomrefs.length; i++) {
+      //HiveGeometry hiveGeometry = GeometryUtils.geometryFromEsriShape(geomrefs[i]);
+      OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomrefs[i]);
+
+      // if (i==0){   // get from ogcGeometry rather than re-create above?
+      // 	spatialRef = hiveGeometry.spatialReference;
+      // }
+
+      if (ogcGeometry == null) {
+        LogUtils.Log_ArgumentsNull(LOG);
+        return null;
+      }
+
+      geomsToUnion[i] = ogcGeometry.getEsriGeometry();
+    }
+
+    try {
+      Geometry unioned = GeometryEngine.union(geomsToUnion, spatialRef);
+
+      // we have to infer the type of the unioned geometry because we don't know
+      // if it's going to end up as a single or multi-part geometry
+      OGCType inferredType = GeometryUtils.getInferredOGCType(unioned);
+
+      return GeometryUtils.geometryToEsriShapeBytesWritable(unioned, firstWKID, inferredType);
+    } catch (Exception e) {
+      LogUtils.Log_ExceptionThrown(LOG, "GeometryEngine.union", e);
+      return null;
+    }
+  }
+}
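ST_Union is variadic, but every input must share the same SRID or the call logs a mismatch and yields NULL. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_AsText(ST_Union(ST_Point(1.1, 2.2), ST_Point(3.3, 4.4))) FROM onerow;  -- expected: MULTIPOINT (1.1 2.2, 3.3 4.4)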
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Within.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Within.java
new file mode 100755
index 00000000000..8544f8fd69a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Within.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.OperatorSimpleRelation;
+import com.esri.core.geometry.OperatorWithin;
+import org.apache.hadoop.hive.ql.exec.Description;
+
+@Description(name = "ST_Within",
+    value = "_FUNC_(geometry1, geometry2) - return true if geometry1 is within geometry2",
+    extended = "Example:\n"
+        + "SELECT _FUNC_(st_point(2, 3), st_polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1;  -- return true\n"
+        + "SELECT _FUNC_(st_point(8, 8), st_polygon(1,1, 1,4, 4,4, 4,1)) from src LIMIT 1;  -- return false")
+
+public class ST_Within extends ST_GeometryRelational {
+
+  @Override
+  protected OperatorSimpleRelation getRelationOperator() {
+    return OperatorWithin.local();
+  }
+
+  @Override
+  public String getDisplayString(String[] args) {
+    return String.format("returns true if %s within %s", args[0], args[1]);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_X.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_X.java
new file mode 100755
index 00000000000..d3a75208c81
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_X.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_X",
+    value = "_FUNC_(point) - returns the X coordinate of point",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  --  1.5")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_X(ST_Point(1,2)) from onerow",
+//			result = "1"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_X(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow",
+//			result = "null"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_X(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_X extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_X.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      return null;
+    }
+
+    switch (GeometryUtils.getType(geomref)) {
+    case ST_POINT:
+      OGCPoint pt = (OGCPoint) ogcGeometry;
+      resultDouble.set(pt.X());
+      return resultDouble;
+    default:
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+
+}
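ST_X (like ST_Y below) is defined only for points; any other geometry type logs an invalid-type message and yields NULL. A sketch (registered UDFs and a one-row table onerow assumed):

    SELECT ST_X(ST_Point(1, 2)) FROM onerow;                   -- expected: 1
    SELECT ST_X(ST_LineString(1.5,2.5, 3.0,2.2)) FROM onerow;  -- expected: NULL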
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Y.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Y.java
new file mode 100755
index 00000000000..e748557e5dc
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Y.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Y",
+    value = "_FUNC_(point) - returns the Y coordinate of point",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  --  2.5")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Y(ST_Point(1,2)) from onerow",
+//			result = "2"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Y(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow",
+//			result = "null"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Y(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_Y extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_Y.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      return null;
+    }
+
+    switch (GeometryUtils.getType(geomref)) {
+    case ST_POINT:
+      OGCPoint pt = (OGCPoint) ogcGeometry;
+      resultDouble.set(pt.Y());
+      return resultDouble;
+    default:
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Z.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Z.java
new file mode 100755
index 00000000000..58a6252a480
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/ST_Z.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.esri.core.geometry.ogc.OGCPoint;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Description(name = "ST_Z",
+    value = "_FUNC_(point) - returns the Z coordinate of point",
+    extended = "Example:\n" + "  SELECT _FUNC_(ST_Point(1.5, 2.5)) FROM src LIMIT 1;  --  1.5")
+//@HivePdkUnitTests(
+//	cases = {
+//		@HivePdkUnitTest(
+//			query = "select ST_Z(ST_Point(1,2,3)) from onerow",
+//			result = "3.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Z(ST_PointZ(0., 3., 1)) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Z(ST_Point('pointzm (0. 3. 1. 2.)')) from onerow",
+//			result = "1.0"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Z(ST_Point(1,2)) from onerow",
+//			result = "null"
+//			),
+//		@HivePdkUnitTest(
+//			query = "select ST_Z(null) from onerow",
+//			result = "null"
+//			)
+//	}
+//)
+
+public class ST_Z extends ST_GeometryAccessor {
+  final DoubleWritable resultDouble = new DoubleWritable();
+  static final Logger LOG = LoggerFactory.getLogger(ST_Z.class.getName());
+
+  public DoubleWritable evaluate(BytesWritable geomref) {
+    if (geomref == null || geomref.getLength() == 0) {
+      LogUtils.Log_ArgumentsNull(LOG);
+      return null;
+    }
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
+    if (ogcGeometry == null) {
+      return null;
+    }
+    if (!ogcGeometry.is3D()) {
+      LogUtils.Log_Not3D(LOG);
+      return null;
+    }
+
+    switch (GeometryUtils.getType(geomref)) {
+    case ST_POINT:
+      OGCPoint pt = (OGCPoint) ogcGeometry;
+      resultDouble.set(pt.Z());
+      return resultDouble;
+    default:
+      LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_POINT, GeometryUtils.getType(geomref));
+      return null;
+    }
+  }
+
+}
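
The three coordinate accessors above (ST_X, ST_Y, ST_Z) share one pattern: they return
a value only for ST_POINT input, and ST_Z additionally requires a 3D point. A few
illustrative queries, consistent with the annotations and commented test cases above
(onerow is the single-row helper table referenced in those test cases, not something
added by this patch):

    SELECT ST_X(ST_Point(1.5, 2.5)) FROM onerow;         -- 1.5
    SELECT ST_Y(ST_Point(1.5, 2.5)) FROM onerow;         -- 2.5
    SELECT ST_Z(ST_PointZ(1.5, 2.5, 3.5)) FROM onerow;   -- 3.5
    SELECT ST_Z(ST_Point(1.5, 2.5)) FROM onerow;         -- NULL (not a 3D point)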
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/BaseJsonSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/BaseJsonSerDe.java
new file mode 100644
index 00000000000..e4b2dbbed55
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/BaseJsonSerDe.java
@@ -0,0 +1,441 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri.serde;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.udf.esri.GeometryUtils;
+import org.apache.hadoop.hive.ql.udf.esri.shims.HiveShims;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.JsonToken;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.lazy.LazyPrimitive;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+import java.util.TimeZone;
+
+abstract public class BaseJsonSerDe extends AbstractSerDe {
+  static final Logger LOG = LoggerFactory.getLogger(BaseJsonSerDe.class.getName());
+
+  static protected JsonFactory jsonFactory = new JsonFactory();
+  static protected TimeZone tz = TimeZone.getDefault();
+
+  protected int numColumns;
+  protected int geometryColumn = -1;
+  protected ArrayList<String> columnNames;
+  protected ArrayList<ObjectInspector> columnOIs;
+  protected boolean[] columnSet;
+  protected StructObjectInspector rowOI; // contains the type information for the fields returned
+  protected String attrLabel = "attributes";  // "properties"
+
+  /* rowBase keeps a base copy of the Writable for each field so they can be reused for
+   * all records. When deserialize is called, row is initially nulled out. Then for each attribute
+   * found in the JSON record the Writable reference is copied from rowBase to row
+   * and set to the appropriate value.  Then row is returned.  This is why values don't linger from
+   * previous records.
+   */
+  ArrayList<Writable> rowBase;
+  ArrayList<Writable> row;
+
+  @Override
+  public void initialize(Configuration cfg, Properties tbl, Properties partitionProperties) throws SerDeException {
+
+    geometryColumn = -1;
+
+    // Read the configuration parameters
+    String columnNameProperty = tbl.getProperty(HiveShims.serdeConstants.LIST_COLUMNS);
+    String columnTypeProperty = tbl.getProperty(HiveShims.serdeConstants.LIST_COLUMN_TYPES);
+
+    ArrayList<TypeInfo> typeInfos = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
+
+    columnNames = new ArrayList<String>();
+    columnNames.addAll(Arrays.asList(columnNameProperty.toLowerCase().split(",")));
+
+    numColumns = columnNames.size();
+
+    columnOIs = new ArrayList<ObjectInspector>(numColumns);
+    columnSet = new boolean[numColumns];
+
+    for (int c = 0; c < numColumns; c++) {
+
+      TypeInfo colTypeInfo = typeInfos.get(c);
+
+      if (colTypeInfo.getCategory() != Category.PRIMITIVE) {
+        throw new SerDeException("Only primitive field types are accepted");
+      }
+
+      if (colTypeInfo.getTypeName().equals("binary")) {
+
+        if (geometryColumn >= 0) {
+          // only one column can be defined as binary for geometries
+          throw new SerDeException("Multiple binary columns defined.  Define only one binary column for geometries");
+        }
+
+        columnOIs.add(GeometryUtils.geometryTransportObjectInspector);
+        geometryColumn = c;
+      } else {
+        columnOIs.add(TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(colTypeInfo));
+      }
+    }
+
+    // standardStruct uses ArrayList to store the row.
+    rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs);
+
+    // constructing the row objects, etc, which will be reused for all rows.
+    rowBase = new ArrayList<Writable>(numColumns);
+    row = new ArrayList<Writable>(numColumns);
+
+    // set each value in rowBase to the writable that corresponds with its PrimitiveObjectInspector
+    for (int c = 0; c < numColumns; c++) {
+
+      PrimitiveObjectInspector poi = (PrimitiveObjectInspector) columnOIs.get(c);
+      Writable writable;
+
+      try {
+        writable = (Writable) poi.getPrimitiveWritableClass().newInstance();
+      } catch (InstantiationException e) {
+        throw new SerDeException("Error creating Writable from ObjectInspector", e);
+      } catch (IllegalAccessException e) {
+        throw new SerDeException("Error creating Writable from ObjectInspector", e);
+      }
+
+      rowBase.add(writable);
+      row.add(null); // default all values to null
+    }
+  }  // /initialize
+
+  @Override
+  public Object deserialize(Writable json_in) throws SerDeException {
+    Text json = (Text) json_in;
+
+    // null out array because we reuse it and we don't want values persisting
+    // from the last record
+    for (int i = 0; i < numColumns; i++)
+      row.set(i, null);
+
+    try {
+      JsonParser parser = jsonFactory.createJsonParser(json.toString());
+
+      JsonToken token = parser.nextToken();
+
+      while (token != null) {
+
+        if (token == JsonToken.START_OBJECT) {
+          if ("geometry".equals(parser.getCurrentName())) {
+            if (geometryColumn > -1) {
+              // create geometry and insert into geometry field
+              OGCGeometry ogcGeom = parseGeom(parser);
+              row.set(geometryColumn, ogcGeom == null ? null : GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom));
+            } else {
+              // no geometry in select field set, don't even bother parsing
+              parser.skipChildren();
+            }
+          } else if (attrLabel.equals(parser.getCurrentName())) {
+
+            token = parser.nextToken();
+
+            while (token != JsonToken.END_OBJECT && token != null) {
+
+              // hive makes all column names in the query's column list lower case
+              String name = parser.getText().toLowerCase();
+
+              parser.nextToken();
+
+              // figure out which column index corresponds with the attribute name
+              int fieldIndex = columnNames.indexOf(name);
+
+              if (fieldIndex >= 0) {
+                setRowFieldFromParser(fieldIndex, parser);
+              }
+
+              token = parser.nextToken();
+            }
+
+            token = parser.nextToken();
+          }
+        }
+
+        token = parser.nextToken();
+      }
+
+    } catch (JsonParseException e) {
+      LOG.error("Error parsing JSON record", e);
+    } catch (IOException e) {
+      LOG.error("Error reading JSON record", e);
+    }
+
+    return row;
+  }
+
+  @Override
+  public ObjectInspector getObjectInspector() throws SerDeException {
+    return rowOI;
+  }
+
+  @Override
+  public SerDeStats getSerDeStats() {
+    return null;
+  }
+
+  @Override
+  public Class<? extends Writable> getSerializedClass() {
+    return Text.class;
+  }
+
+  @Override
+  public Writable serialize(Object obj, ObjectInspector oi) throws SerDeException {
+
+    StandardStructObjectInspector structOI = (StandardStructObjectInspector) oi;
+
+    // get list of writables, one for each field in the row
+    List<Object> fieldWritables = structOI.getStructFieldsDataAsList(obj);
+
+    StringWriter writer = new StringWriter();
+
+    try {
+      JsonGenerator jsonGen = jsonFactory.createJsonGenerator(writer);
+
+      jsonGen.writeStartObject();
+
+      // first write attributes
+      jsonGen.writeObjectFieldStart(attrLabel);
+
+      for (int i = 0; i < fieldWritables.size(); i++) {
+        if (i == geometryColumn)
+          continue; // skip geometry, it comes later
+
+        try {
+          generateJsonFromValue(fieldWritables.get(i), i, jsonGen);
+        } catch (JsonProcessingException e) {
+          LOG.error("Error generating JSON for field " + columnNames.get(i), e);
+        } catch (IOException e) {
+          LOG.error("Error generating JSON for field " + columnNames.get(i), e);
+        }
+      }
+
+      jsonGen.writeEndObject();
+
+      // if geometry column exists, write it
+      if (geometryColumn > -1) {
+        Object got = fieldWritables.get(geometryColumn);
+        if (got == null) {
+          jsonGen.writeObjectField("geometry", null);
+        } else {
+          BytesWritable bytesWritable = null;
+          if (got instanceof BytesWritable)
+            bytesWritable = (BytesWritable) got;
+          else  // SparkSQL, #97
+            bytesWritable = new BytesWritable((byte[]) got);  // idea: avoid extra object
+          OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(bytesWritable);
+          jsonGen.writeRaw(",\"geometry\":" + outGeom(ogcGeometry));
+        }
+      }
+
+      jsonGen.writeEndObject();
+
+      jsonGen.close();
+
+    } catch (JsonGenerationException e) {
+      LOG.error("Error generating JSON", e);
+      return null;
+    } catch (IOException e) {
+      LOG.error("Error generating JSON", e);
+      return null;
+    }
+
+    return new Text(writer.toString());
+  }
+
+  /**
+   * Send the value of the cell to the generator, using the column type
+   *
+   * @param value The attribute value as the object given by Hive
+   * @param fieldIndex column index of field in row
+   * @param jsonGen JsonGenerator
+   * @throws JsonProcessingException
+   * @throws IOException
+   */
+  private void generateJsonFromValue(Object value, int fieldIndex, JsonGenerator jsonGen)
+      throws JsonProcessingException, IOException {
+    String label = columnNames.get(fieldIndex);
+    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) this.columnOIs.get(fieldIndex);
+    if (value == null) {
+      jsonGen.writeObjectField(label, null);
+    } else if (value instanceof LazyPrimitive<?, ?>) {  // have seen LazyString, #25
+      generateJsonFromLazy((LazyPrimitive<?, ?>) value, fieldIndex, label, poi, jsonGen);
+    } else if (value instanceof Writable) {
+      generateJsonFromWritable((Writable) value, fieldIndex, label, poi, jsonGen);
+    } else {  // SparkSQL, #97
+      jsonGen.writeObjectField(label, value);
+    }
+  }
+
+  private void generateJsonFromLazy(LazyPrimitive<?, ?> value, int fieldIndex, String label,
+      PrimitiveObjectInspector poi, JsonGenerator jsonGen) throws IOException {
+    generateJsonFromWritable(value.getWritableObject(), fieldIndex, label, poi, jsonGen);
+  }
+
+  private void generateJsonFromWritable(Writable value, int fieldIndex, String label, PrimitiveObjectInspector poi,
+      JsonGenerator jsonGen) throws IOException {
+    Object prim = poi.getPrimitiveJavaObject(value);
+    Long epoch = HiveShims.getPrimitiveEpoch(prim, tz);
+    if (epoch == null) {  // anything but a recognized DATE or TIMESTAMP
+      jsonGen.writeObjectField(label, prim);
+    } else {
+      jsonGen.writeObjectField(label, epoch);
+    }
+  }
+
+  // Write OGCGeometry to JSON
+  abstract protected String outGeom(OGCGeometry geom);
+
+  // Parse OGCGeometry from JSON
+  abstract protected OGCGeometry parseGeom(JsonParser parser);
+
+  private java.sql.Date parseDate(JsonParser parser) throws IOException {
+    java.sql.Date jsd = null;
+    if (JsonToken.VALUE_NUMBER_INT.equals(parser.getCurrentToken())) {
+      // DateWritable#daysToMillis adjusts the numerical/epoch time
+      // to midnight in the local time zone.  See HIVE-12192.
+      // Attempt to compensate when the date is provided as an epoch value, which is unambiguously UTC.
+      long epoch = parser.getLongValue();
+      jsd = new java.sql.Date(epoch - tz.getOffset(epoch));
+    } else
+      try {
+        long epoch = parseTime(parser.getText(), "yyyy-MM-dd");
+        jsd = new java.sql.Date(epoch + 43200000);  // midday rather than midnight
+      } catch (java.text.ParseException e) {
+        // null
+      }
+    return jsd;
+  }
+
+  private java.sql.Timestamp parseTime(JsonParser parser) throws IOException {
+    java.sql.Timestamp jst = null;
+    if (JsonToken.VALUE_NUMBER_INT.equals(parser.getCurrentToken())) {
+      long epoch = parser.getLongValue();
+      jst = new java.sql.Timestamp(epoch);
+    } else {
+      String value = parser.getText();
+      int point = value.indexOf('.');
+      String dateStr = (point < 0) ? value : value.substring(0, point + 4);
+      String[] formats = { "yyyy-MM-dd HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm", "yyyy-MM-dd" };
+      for (String format : formats) {
+        try {
+          jst = new java.sql.Timestamp(parseTime(dateStr, format));
+          break;
+        } catch (java.text.ParseException e) {
+          // remain null after this attempted format
+        }
+      }
+    }  // else String value
+    return jst;
+  }
+
+  private long parseTime(String value, String format) throws java.text.ParseException {  // epoch
+    java.text.SimpleDateFormat dtFmt = new java.text.SimpleDateFormat(format);
+    dtFmt.setTimeZone(TimeZone.getTimeZone("UTC"));
+    return dtFmt.parse(value).getTime();
+  }
+
+  /**
+   * Copies the Writable at fieldIndex from rowBase to row, then sets the value of the Writable
+   * to the value in parser
+   *
+   * @param fieldIndex column index of field in row
+   * @param parser JsonParser pointing to the attribute
+   * @throws JsonParseException
+   * @throws IOException
+   */
+  private void setRowFieldFromParser(int fieldIndex, JsonParser parser) throws JsonParseException, IOException {
+
+    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) this.columnOIs.get(fieldIndex);
+    if (JsonToken.VALUE_NULL == parser.getCurrentToken())
+      return;  // leave the row-cell as null
+
+    // set the field in the row to the writable from rowBase
+    row.set(fieldIndex, rowBase.get(fieldIndex));
+
+    switch (poi.getPrimitiveCategory()) {
+    case BYTE:
+      ((ByteWritable) row.get(fieldIndex)).set(parser.getByteValue());
+      break;
+    case SHORT:
+      ((ShortWritable) row.get(fieldIndex)).set(parser.getShortValue());
+      break;
+    case INT:
+      ((IntWritable) row.get(fieldIndex)).set(parser.getIntValue());
+      break;
+    case LONG:
+      ((LongWritable) row.get(fieldIndex)).set(parser.getLongValue());
+      break;
+    case DOUBLE:
+      ((DoubleWritable) row.get(fieldIndex)).set(parser.getDoubleValue());
+      break;
+    case FLOAT:
+      ((FloatWritable) row.get(fieldIndex)).set(parser.getFloatValue());
+      break;
+    case BOOLEAN:
+      ((BooleanWritable) row.get(fieldIndex)).set(parser.getBooleanValue());
+      break;
+    case DATE:    // DateWritable stores days not milliseconds.
+      HiveShims.setDateWritable(row.get(fieldIndex), parseDate(parser));
+      break;
+    case TIMESTAMP:
+      HiveShims.setTimeWritable(row.get(fieldIndex), parseTime(parser));
+      break;
+    default:    // STRING/unrecognized
+      ((Text) row.get(fieldIndex)).set(parser.getText());
+      break;
+    }
+  }
+
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/EsriJsonSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/EsriJsonSerDe.java
new file mode 100644
index 00000000000..74d4f54cfef
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/EsriJsonSerDe.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri.serde;
+
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.MapGeometry;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.fasterxml.jackson.core.JsonParser;
+
+public class EsriJsonSerDe extends BaseJsonSerDe {
+
+  @Override
+  protected String outGeom(OGCGeometry geom) {
+    return geom.asJson();
+  }
+
+  @Override
+  protected OGCGeometry parseGeom(JsonParser parser) {
+    MapGeometry mapGeom = GeometryEngine.jsonToGeometry(parser);
+    return OGCGeometry.createFromEsriGeometry(mapGeom.getGeometry(), mapGeom.getSpatialReference());
+  }
+}
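
EsriJsonSerDe above serializes each row as Esri JSON, wrapping the non-geometry columns
in an "attributes" object and emitting the geometry via asJson(). A hypothetical table
definition using it (table and column names are illustrative only; exactly one BINARY
column is allowed, and it becomes the geometry column, per BaseJsonSerDe.initialize):

    CREATE TABLE points_esri (id INT, name STRING, geom BINARY)
    ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.udf.esri.serde.EsriJsonSerDe'
    STORED AS TEXTFILE;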
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/GeoJsonSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/GeoJsonSerDe.java
new file mode 100644
index 00000000000..a21aa69f46e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/GeoJsonSerDe.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri.serde;
+
+import com.esri.core.geometry.ogc.OGCGeometry;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import java.io.IOException;
+
+public class GeoJsonSerDe extends BaseJsonSerDe {
+
+  ObjectMapper mapper = null;
+
+  public GeoJsonSerDe() {
+    super();
+    attrLabel = "properties";
+    mapper = new ObjectMapper();
+  }
+
+  @Override
+  protected String outGeom(OGCGeometry geom) {
+    return geom.asGeoJson();
+  }
+
+  @Override
+  protected OGCGeometry parseGeom(JsonParser parser) {
+    try {
+      ObjectNode node = mapper.readTree(parser);
+      return OGCGeometry.fromGeoJson(node.toString());
+    } catch (JsonProcessingException e1) {
+      LOG.error("Error parsing GeoJSON geometry", e1);
+    } catch (IOException e1) {
+      LOG.error("Error reading GeoJSON geometry", e1);
+    }
+    return null;
+  }
+}
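
GeoJsonSerDe differs from EsriJsonSerDe only in the wrapper key ("properties" instead of
"attributes") and in using GeoJSON for the geometry (asGeoJson / OGCGeometry.fromGeoJson).
A table declared like the EsriJsonSerDe sketch above, but with
ROW FORMAT SERDE 'org.apache.hadoop.hive.ql.udf.esri.serde.GeoJsonSerDe', would parse one
record per line of roughly this shape (values illustrative):

    {"properties":{"id":1,"name":"a"},"geometry":{"type":"Point","coordinates":[1.5, 2.5]}}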
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/JsonSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/JsonSerde.java
new file mode 100755
index 00000000000..503faf3af8a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/serde/JsonSerde.java
@@ -0,0 +1,21 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// Obsoleted
+// package com.esri.hadoop.hive.serde;
+// @Deprecated in v1.2
+// public class JsonSerde extends EsriJsonSerDe {}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/shims/HiveShims.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/shims/HiveShims.java
new file mode 100644
index 00000000000..762692ec1ad
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/esri/shims/HiveShims.java
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri.shims;
+
+import java.lang.reflect.Method;
+import java.util.TimeZone;
+
+public class HiveShims {
+
+  /**
+   * This class is supplied for compatibility between Hive versions.
+   * At 0.10 the serde constants were moved to another package.  Also,
+   * at 0.11 the previous class was re-added for backwards
+   * compatibility, but deprecated.
+   *
+   */
+  public static class serdeConstants {
+    public static final String LIST_COLUMNS;
+    public static final String LIST_COLUMN_TYPES;
+
+    static {
+      Class<?> clazz = null;
+
+      try {
+        // Hive 0.10 and above constants
+        clazz = Class.forName("org.apache.hadoop.hive.serde.serdeConstants");
+      } catch (ClassNotFoundException e) {
+        try {
+          // Hive 0.9 and below constants
+          clazz = Class.forName("org.apache.hadoop.hive.serde.Constants");
+        } catch (ClassNotFoundException e1) {
+          // not much we can do here
+        }
+      }
+
+      LIST_COLUMNS = getAsStringOrNull(clazz, "LIST_COLUMNS");
+      LIST_COLUMN_TYPES = getAsStringOrNull(clazz, "LIST_COLUMN_TYPES");
+    }
+
+    static String getAsStringOrNull(Class<?> clazz, String constant) {
+      try {
+        return (String) clazz.getField(constant).get(null);
+      } catch (Exception e) {
+        return null;
+      }
+    }
+  }
+
+  /**
+   * Classes o.a.h.h.common.type.Date and Timestamp were introduced in Hive 3.1.
+   */
+  public static Long getPrimitiveEpoch(Object prim, TimeZone tz) {
+    if (prim instanceof java.sql.Timestamp) {
+      return ((java.sql.Timestamp) prim).getTime();
+    } else if (prim instanceof java.util.Date) {
+      return ((java.util.Date) prim).getTime();
+    } else {
+      try {
+        Class<?> dtClazz = Class.forName("org.apache.hadoop.hive.common.type.Date");
+        if (prim.getClass() == dtClazz) {
+          Method dtGetImpl = dtClazz.getMethod("toEpochMilli");
+          return (java.lang.Long) (dtGetImpl.invoke(prim));
+        } else {
+          Class<?> ttClazz = Class.forName("org.apache.hadoop.hive.common.type.Timestamp");
+          if (prim.getClass() == ttClazz) {
+            Method ttGetImpl = ttClazz.getMethod("toEpochMilli");
+            return (java.lang.Long) (ttGetImpl.invoke(prim));
+          } else {
+            return null;
+          }
+        }
+      } catch (Exception exc) {
+        return null;
+      }
+    }
+  }
+
+  /**
+   * Type DATE was introduced in Hive 0.12, with class DateWritable in the API.
+   * Class DateWritableV2 is used instead as of Hive 3.1.
+   */
+  public static void setDateWritable(Object dwHive, long epoch, TimeZone tz) {
+    try {                                // Hive 3.1 and above
+      Class<?> dtClazz = Class.forName("org.apache.hadoop.hive.common.type.Date");
+      Class<?> dwClazz = Class.forName("org.apache.hadoop.hive.serde2.io.DateWritableV2");
+      Method dtSetImpl = dtClazz.getMethod("setTimeInMillis", long.class);
+      Method dwSetImpl = dwClazz.getMethod("set", dtClazz);
+      Object dtObj = dtClazz.getConstructor().newInstance();
+      dtSetImpl.invoke(dtObj, epoch);
+      dwSetImpl.invoke(dwHive, dtObj);
+    } catch (Exception e1) {
+      try {                            // Hive 0.12 and above
+        Class<?> dwClazz = Class.forName("org.apache.hadoop.hive.serde2.io.DateWritable");
+        Method dwSetImpl = dwClazz.getMethod("set", java.sql.Date.class);
+        dwSetImpl.invoke(dwHive, new java.sql.Date(epoch));
+      } catch (Exception e2) {              // Hive 0.11 and below
+        // column type DATE not supported
+        throw new UnsupportedOperationException("DATE type");
+      }
+    }
+  }  // setDateWritable
+
+  /**
+   * Type DATE was introduced in Hive 0.12, with class DateWritable in the API.
+   * Class DateWritableV2 is used instead as of Hive 3.1.
+   */
+  public static void setDateWritable(Object dwHive, java.sql.Date jsd) {
+    try {                                // Hive 3.1 and above
+      Class<?> dtClazz = Class.forName("org.apache.hadoop.hive.common.type.Date");
+      Class<?> dwClazz = Class.forName("org.apache.hadoop.hive.serde2.io.DateWritableV2");
+      Method dtSetImpl = dtClazz.getMethod("setTimeInMillis", long.class);
+      Method dwSetImpl = dwClazz.getMethod("set", dtClazz);
+      Object dtObj = dtClazz.getConstructor().newInstance();
+      dtSetImpl.invoke(dtObj, jsd.getTime());
+      dwSetImpl.invoke(dwHive, dtObj);
+    } catch (Exception e1) {
+      try {                            // Hive 0.12 and above
+        Class<?> dwClazz = Class.forName("org.apache.hadoop.hive.serde2.io.DateWritable");
+        Method dwSetImpl = dwClazz.getMethod("set", java.sql.Date.class);
+        dwSetImpl.invoke(dwHive, jsd);
+      } catch (Exception e2) {              // Hive 0.11 and below
+        // column type DATE not supported
+        throw new UnsupportedOperationException("DATE type");
+      }
+    }
+  }  // setDateWritable
+
+  /**
+   * Type TIMESTAMP was introduced in Hive 0.12, with class TimestampWritable in the API.
+   * Class TimestampWritableV2 is used instead as of Hive 3.1.
+   */
+  public static void setTimeWritable(Object twHive, long epoch) {
+    try {                                // Hive 3.1 and above
+      Class<?> ttClazz = Class.forName("org.apache.hadoop.hive.common.type.Timestamp");
+      Class<?> twClazz = Class.forName("org.apache.hadoop.hive.serde2.io.TimestampWritableV2");
+      Method ttSetImpl = ttClazz.getMethod("setTimeInMillis", long.class);
+      Method twSetImpl = twClazz.getMethod("set", ttClazz);
+      Object ttObj = ttClazz.getConstructor().newInstance();
+      ttSetImpl.invoke(ttObj, epoch);
+      twSetImpl.invoke(twHive, ttObj);
+    } catch (Exception e1) {
+      try {                            // Hive 0.12 and above
+        Class<?> twClazz = Class.forName("org.apache.hadoop.hive.serde2.io.TimestampWritable");
+        Method twSetImpl = twClazz.getMethod("set", java.sql.Timestamp.class);
+        twSetImpl.invoke(twHive, new java.sql.Timestamp(epoch));
+      } catch (Exception e2) {              // Hive 0.11 and below
+        // column type TIMESTAMP not supported
+        throw new UnsupportedOperationException("TIMESTAMP type");
+      }
+    }
+  }  // setTimeWritable
+
+  /**
+   * Type TIMESTAMP was introduced in Hive 0.12, with class TimestampWritable in the API.
+   * Class TimestampWritableV2 is used instead as of Hive 3.1.
+   */
+  public static void setTimeWritable(Object twHive, java.sql.Timestamp jst) {
+    long epoch = jst.getTime();
+    try {                                // Hive 3.1 and above
+      Class<?> ttClazz = Class.forName("org.apache.hadoop.hive.common.type.Timestamp");
+      Class<?> twClazz = Class.forName("org.apache.hadoop.hive.serde2.io.TimestampWritableV2");
+      Method ttSetImpl = ttClazz.getMethod("setTimeInMillis", long.class);
+      Method twSetImpl = twClazz.getMethod("set", ttClazz);
+      Object ttObj = ttClazz.getConstructor().newInstance();
+      ttSetImpl.invoke(ttObj, epoch);
+      twSetImpl.invoke(twHive, ttObj);
+    } catch (Exception e1) {
+      try {                            // Hive 0.12 and above
+        Class<?> twClazz = Class.forName("org.apache.hadoop.hive.serde2.io.TimestampWritable");
+        Method twSetImpl = twClazz.getMethod("set", java.sql.Timestamp.class);
+        twSetImpl.invoke(twHive, new java.sql.Timestamp(epoch));
+      } catch (Exception e2) {              // Hive 0.11 and below
+        // column type TIMESTAMP not supported
+        throw new UnsupportedOperationException("TIMESTAMP type");
+      }
+    }
+  }  // setTimeWritable
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStAsShape.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStAsShape.java
new file mode 100644
index 00000000000..9c56c7e95f6
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStAsShape.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.Geometry.Type;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.Point;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public class TestStAsShape {
+
+  private final static double Epsilon = 0.0001;
+
+  @Test
+  public void testPointAsShape() {
+    ST_Point point = new ST_Point();
+    final double longitude = 12.224;
+    final double latitude = 51.829;
+    BytesWritable pointAsWritable = point.evaluate(new DoubleWritable(longitude), new DoubleWritable(latitude));
+    assertNotNull("The point writable must not be null!", pointAsWritable);
+
+    ST_AsShape asShape = new ST_AsShape();
+    BytesWritable shapeAsWritable = asShape.evaluate(pointAsWritable);
+    assertNotNull("The shape writable must not be null!", pointAsWritable);
+
+    byte[] esriShapeBuffer = shapeAsWritable.getBytes();
+    Geometry esriGeometry = GeometryEngine.geometryFromEsriShape(esriShapeBuffer, Type.Point);
+    assertNotNull("The geometry must not be null!", esriGeometry);
+    assertTrue("Geometry type point expected!", esriGeometry instanceof Point);
+
+    Point esriPoint = (Point) esriGeometry;
+    assertEquals("Longitude is different!", longitude, esriPoint.getX(), Epsilon);
+    assertEquals("Latitude is different!", latitude, esriPoint.getY(), Epsilon);
+  }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStCentroid.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStCentroid.java
new file mode 100644
index 00000000000..fdfffc876af
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStCentroid.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Point;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+public class TestStCentroid {
+
+  private final static double Epsilon = 0.0001;
+
+  /**
+   * Validates the centroid geometry writable.
+   *
+   * @param point
+   *            the represented point location.
+   * @param geometryAsWritable
+   *            the geometry represented as {@link BytesWritable}.
+   */
+  private static void validatePoint(Point point, BytesWritable geometryAsWritable) {
+    ST_X getX = new ST_X();
+    ST_Y getY = new ST_Y();
+    DoubleWritable xAsWritable = getX.evaluate(geometryAsWritable);
+    DoubleWritable yAsWritable = getY.evaluate(geometryAsWritable);
+
+    if (null == xAsWritable || null == yAsWritable || Math.abs(point.getX() - xAsWritable.get()) > Epsilon
+        || Math.abs(point.getY() - yAsWritable.get()) > Epsilon)
+      System.err.println("validateCentroid: " + (new ST_AsText()).evaluate(geometryAsWritable) + " ~ " + point);
+
+    assertNotNull("The x writable must not be null!", xAsWritable);
+    assertNotNull("The y writable must not be null!", yAsWritable);
+    assertEquals("Longitude is different!", point.getX(), xAsWritable.get(), Epsilon);
+    assertEquals("Latitude is different!", point.getY(), yAsWritable.get(), Epsilon);
+  }
+
+  @Test
+  public void TestSimplePointCentroid() throws Exception {
+    final ST_Centroid stCtr = new ST_Centroid();
+    final ST_Point stPt = new ST_Point();
+    BytesWritable bwGeom = stPt.evaluate(new Text("point (2 3)"));
+    BytesWritable bwCentroid = stCtr.evaluate(bwGeom);
+    validatePoint(new Point(2, 3), bwCentroid);
+  }
+
+  @Test
+  public void TestMultiPointCentroid() throws Exception {
+    final ST_Centroid stCtr = new ST_Centroid();
+    final ST_MultiPoint stMp = new ST_MultiPoint();
+    BytesWritable bwGeom = stMp.evaluate(new Text("multipoint ((0 0), (1 1), (1 -1), (6 0))"));
+    BytesWritable bwCentroid = stCtr.evaluate(bwGeom);
+    validatePoint(new Point(2, 0), bwCentroid);
+  }
+
+  @Test
+  public void TestLineCentroid() throws Exception {
+    final ST_Centroid stCtr = new ST_Centroid();
+    final ST_LineString stLn = new ST_LineString();
+    BytesWritable bwGeom = stLn.evaluate(new Text("linestring (0 0, 6 0)"));
+    BytesWritable bwCentroid = stCtr.evaluate(bwGeom);
+    validatePoint(new Point(3, 0), bwCentroid);
+    bwGeom = stLn.evaluate(new Text("linestring (0 0, 0 4, 12 4)"));
+    bwCentroid = stCtr.evaluate(bwGeom);
+    // L1 = 4, L2 = 12, W1 = 0.25, W2 = 0.75, X = W1 * 0 + W2 * 6, Y = W1 * 2 + W2 * 4
+    // Or like centroid of multipoint of 1 of (0 2) and 3 of (6 4)
+    validatePoint(new Point(4.5, 3.5), bwCentroid);
+  }
+
+  @Test
+  public void TestPolygonCentroid() throws Exception {
+    final ST_Centroid stCtr = new ST_Centroid();
+    final ST_Polygon stPoly = new ST_Polygon();
+    BytesWritable bwGeom = stPoly.evaluate(new Text("polygon ((0 0, 0 8, 8 8, 8 0, 0 0))"));
+    BytesWritable bwCentroid = stCtr.evaluate(bwGeom);
+    validatePoint(new Point(4, 4), bwCentroid);
+    bwGeom = stPoly.evaluate(new Text("polygon ((1 1, 5 1, 3 4, 1 1))"));
+    bwCentroid = stCtr.evaluate(bwGeom);
+    validatePoint(new Point(3, 2), bwCentroid);
+    bwGeom = stPoly.evaluate(new Text("polygon ((14 0, -14 0, -2 24, 2 24, 14 0))"));
+    bwCentroid = stCtr.evaluate(bwGeom);        // Cross-checked with ...
+    validatePoint(new Point(0, 9), bwCentroid);  // ... omnicalculator.com/math/centroid
+  }
+
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStGeomFromShape.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStGeomFromShape.java
new file mode 100644
index 00000000000..659f1bb97bf
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStGeomFromShape.java
@@ -0,0 +1,223 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import com.esri.core.geometry.Geometry;
+import com.esri.core.geometry.GeometryEngine;
+import com.esri.core.geometry.Point;
+import com.esri.core.geometry.Polygon;
+import com.esri.core.geometry.Polyline;
+import com.esri.core.geometry.SpatialReference;
+import com.esri.core.geometry.ogc.OGCGeometry;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+public class TestStGeomFromShape {
+
+  private final static double Epsilon = 0.0001;
+
+  private static Point createFirstLocation() {
+    final double longitude = 12.224;
+    final double latitude = 51.829;
+    return new Point(longitude, latitude);
+  }
+
+  private static Point createSecondLocation() {
+    final double longitude = 12.39807;
+    final double latitude = 51.34933;
+    return new Point(longitude, latitude);
+  }
+
+  private static Point createThirdLocation() {
+    final double longitude = 6.9823;
+    final double latitude = 50.7657;
+    return new Point(longitude, latitude);
+  }
+
+  private static Point createFourthLocation() {
+    final double longitude = 7.102594;
+    final double latitude = 50.73733;
+    return new Point(longitude, latitude);
+  }
+
+  private static Polyline createLine() {
+    Polyline line = new Polyline();
+    line.startPath(createFirstLocation());
+    line.lineTo(createSecondLocation());
+    return line;
+  }
+
+  private static Polyline createPolyline() {
+    Polyline line = new Polyline();
+    line.startPath(createFirstLocation());
+    line.lineTo(createSecondLocation());
+    line.lineTo(createThirdLocation());
+    line.lineTo(createFourthLocation());
+    return line;
+  }
+
+  private static Polygon createPolygon() {
+    Polygon polygon = new Polygon();
+    polygon.startPath(createFirstLocation());
+    polygon.lineTo(createSecondLocation());
+    polygon.lineTo(createThirdLocation());
+    polygon.lineTo(createFourthLocation());
+    polygon.closeAllPaths();
+    return polygon;
+  }
+
+  /**
+   * Validates the geometry writable.
+   *
+   * @param point
+   *            the represented point location.
+   * @param wkid
+   *            the represented spatial reference ID.
+   * @param geometryAsWritable
+   *            the geometry represented as {@link BytesWritable}.
+   */
+  private static void validatePoint(Point point, int wkid, BytesWritable geometryAsWritable) {
+    ST_X getX = new ST_X();
+    DoubleWritable xAsWritable = getX.evaluate(geometryAsWritable);
+    assertNotNull("The x writable must not be null!", xAsWritable);
+
+    ST_Y getY = new ST_Y();
+    DoubleWritable yAsWritable = getY.evaluate(geometryAsWritable);
+    assertNotNull("The y writable must not be null!", yAsWritable);
+
+    assertEquals("Longitude is different!", point.getX(), xAsWritable.get(), Epsilon);
+    assertEquals("Latitude is different!", point.getY(), yAsWritable.get(), Epsilon);
+
+    ST_SRID getWkid = new ST_SRID();
+    IntWritable wkidAsWritable = getWkid.evaluate(geometryAsWritable);
+    assertNotNull("The wkid writable must not be null!", wkidAsWritable);
+
+    assertEquals("The wkid is different!", wkid, wkidAsWritable.get());
+  }
+
+  @Test
+  public void testGeomFromPointShapeWithoutSpatialReference() throws UDFArgumentException {
+    Point point = createFirstLocation();
+
+    byte[] esriShape = GeometryEngine.geometryToEsriShape(point);
+    assertNotNull("The shape must not be null!", esriShape);
+
+    BytesWritable shapeAsWritable = new BytesWritable(esriShape);
+    assertNotNull("The shape writable must not be null!", shapeAsWritable);
+
+    ST_GeomFromShape fromShape = new ST_GeomFromShape();
+    BytesWritable geometryAsWritable = fromShape.evaluate(shapeAsWritable);
+    assertNotNull("The geometry writable must not be null!", geometryAsWritable);
+
+    final int wkid = 0;
+    validatePoint(point, wkid, geometryAsWritable);
+  }
+
+  @Test
+  public void testGeomFromPointShape() throws UDFArgumentException {
+    Point point = createFirstLocation();
+    byte[] esriShape = GeometryEngine.geometryToEsriShape(point);
+    assertNotNull("The shape must not be null!", esriShape);
+
+    BytesWritable shapeAsWritable = new BytesWritable(esriShape);
+    assertNotNull("The shape writable must not be null!", shapeAsWritable);
+
+    final int wkid = 4326;
+    ST_GeomFromShape fromShape = new ST_GeomFromShape();
+    BytesWritable geometryAsWritable = fromShape.evaluate(shapeAsWritable, wkid);
+    assertNotNull("The geometry writable must not be null!", geometryAsWritable);
+
+    validatePoint(point, wkid, geometryAsWritable);
+  }
+
+  @Test
+  public void testGeomFromLineShape() throws UDFArgumentException {
+    Polyline line = createLine();
+    byte[] esriShape = GeometryEngine.geometryToEsriShape(line);
+    assertNotNull("The shape must not be null!", esriShape);
+
+    BytesWritable shapeAsWritable = new BytesWritable(esriShape);
+    assertNotNull("The shape writable must not be null!", shapeAsWritable);
+
+    final int wkid = 4326;
+    ST_GeomFromShape fromShape = new ST_GeomFromShape();
+    BytesWritable geometryAsWritable = fromShape.evaluate(shapeAsWritable, wkid);
+    assertNotNull("The geometry writable must not be null!", geometryAsWritable);
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geometryAsWritable);
+    assertNotNull("The OGC geometry must not be null!", ogcGeometry);
+
+    Geometry esriGeometry = ogcGeometry.getEsriGeometry();
+    assertNotNull("The Esri geometry must not be null!", esriGeometry);
+    assertTrue("The geometries are different!",
+        GeometryEngine.equals(line, esriGeometry, SpatialReference.create(wkid)));
+  }
+
+  @Test
+  public void testGeomFromPolylineShape() throws UDFArgumentException {
+    Polyline line = createPolyline();
+    byte[] esriShape = GeometryEngine.geometryToEsriShape(line);
+    assertNotNull("The shape must not be null!", esriShape);
+
+    BytesWritable shapeAsWritable = new BytesWritable(esriShape);
+    assertNotNull("The shape writable must not be null!", shapeAsWritable);
+
+    final int wkid = 4326;
+    ST_GeomFromShape fromShape = new ST_GeomFromShape();
+    BytesWritable geometryAsWritable = fromShape.evaluate(shapeAsWritable, wkid);
+    assertNotNull("The geometry writable must not be null!", geometryAsWritable);
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geometryAsWritable);
+    assertNotNull("The OGC geometry must not be null!", ogcGeometry);
+
+    Geometry esriGeometry = ogcGeometry.getEsriGeometry();
+    assertNotNull("The Esri geometry must not be null!", esriGeometry);
+    assertTrue("The geometries are different!",
+        GeometryEngine.equals(line, esriGeometry, SpatialReference.create(wkid)));
+  }
+
+  @Test
+  public void testGeomFromPolygonShape() throws UDFArgumentException {
+    Polygon polygon = createPolygon();
+    byte[] esriShape = GeometryEngine.geometryToEsriShape(polygon);
+    assertNotNull("The shape must not be null!", esriShape);
+
+    BytesWritable shapeAsWritable = new BytesWritable(esriShape);
+    assertNotNull("The shape writable must not be null!", shapeAsWritable);
+
+    final int wkid = 4326;
+    ST_GeomFromShape fromShape = new ST_GeomFromShape();
+    BytesWritable geometryAsWritable = fromShape.evaluate(shapeAsWritable, wkid);
+    assertNotNull("The geometry writable must not be null!", geometryAsWritable);
+
+    OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geometryAsWritable);
+    assertNotNull("The OGC geometry must not be null!", ogcGeometry);
+
+    Geometry esriGeometry = ogcGeometry.getEsriGeometry();
+    assertNotNull("The Esri geometry must not be null!", esriGeometry);
+    assertTrue("The geometries are different!",
+        GeometryEngine.equals(polygon, esriGeometry, SpatialReference.create(wkid)));
+  }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStGeometryType.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStGeometryType.java
new file mode 100644
index 00000000000..216aad2f1d6
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStGeometryType.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+// select ST_GeometryType(ST_Point(0, 0)) from onerow;
+// select ST_GeometryType(ST_Point('point (10.02 20.01)')) from onerow;
+// select ST_GeometryType(ST_Point('point z (10.02 20.01 2)')) from onerow;
+// select ST_GeometryType(ST_MultiPoint('multipoint ((1 2))')) from onerow;
+// select ST_GeometryType(ST_Linestring(10,10, 20,20)) from onerow;
+// select ST_GeometryType(ST_Linestring('linestring (10 10, 20 20)')) from onerow;
+// select ST_GeometryType(ST_Linestring('linestring z (10 10 2, 20 20 4)')) from onerow;
+// select ST_GeometryType(ST_GeomFromText('polygon ((0 0, 0 10, 10 0, 0 0))')) from onerow;
+// select ST_GeometryType(ST_Polygon('polygon ((0 0, 0 10, 10 0, 0 0))')) from onerow;
+// select ST_GeometryType(ST_Polygon(1,1, 1,4, 4,1)) from onerow;
+// select ST_GeometryType(ST_Polygon(1,1, 4,1, 1,4)) from onerow;
+// select ST_GeometryType(ST_Polygon(1,1, 1,4, 4,1, 1,1)) from onerow;
+// select ST_GeometryType(ST_Polygon(1,1, 4,1, 1,4, 1,1)) from onerow;
+// select ST_GeometryType(ST_GeomFromGeoJson('{"type":"Point", "coordinates":[1.2, 2.4]}')) from onerow;
+
+public class TestStGeometryType {
+
+  @Test
+  public void TestStGeometryType() throws Exception {
+    ST_GeometryType typer = new ST_GeometryType();
+    ST_Point stPt = new ST_Point();
+    ST_MultiPoint stMp = new ST_MultiPoint();
+    ST_LineString stLn = new ST_LineString();
+    ST_Polygon stPoly = new ST_Polygon();
+    BytesWritable bwGeom = stPt.evaluate(new DoubleWritable(0), new DoubleWritable(0));
+    Text gty = typer.evaluate(bwGeom);
+    assertEquals("ST_POINT", gty.toString());
+    bwGeom = stPt.evaluate(new Text("point z (10.02 20.01 2)"));
+    gty = typer.evaluate(bwGeom);
+    assertEquals("ST_POINT", gty.toString());
+    bwGeom = stLn.evaluate(new Text("linestring (10 10, 20 20)"));
+    gty = typer.evaluate(bwGeom);
+    assertEquals("ST_LINESTRING", gty.toString());
+    bwGeom = stPoly.evaluate(new Text("polygon ((0 0, 0 10, 10 0, 0 0))"));
+    gty = typer.evaluate(bwGeom);
+    assertEquals("ST_POLYGON", gty.toString());
+  }
+
+}
+
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStLineString.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStLineString.java
new file mode 100644
index 00000000000..cb3527bb90c
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStLineString.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import static org.junit.Assert.assertEquals;
+
+// select ST_GeometryType(ST_Linestring(10,10, 20,20)) from onerow;
+// select ST_GeometryType(ST_Linestring('linestring (10 10, 20 20)')) from onerow;
+// select ST_GeometryType(ST_Linestring('linestring z (10 10 2, 20 20 4)')) from onerow;
+
+public class TestStLineString {
+
+  @Test
+  public void test() throws Exception {
+    ST_GeometryType typer = new ST_GeometryType();
+    ST_LineString stLn = new ST_LineString();
+    //ST_Equals stEq = new ST_Equals();
+    DoubleWritable ten = new DoubleWritable(10);
+    DoubleWritable twenty = new DoubleWritable(20);
+    BytesWritable bwGeom = stLn.evaluate(ten, ten, twenty);
+    assertEquals(null, bwGeom);  // odd number of coordinate arguments, so no geometry is built
+    bwGeom = stLn.evaluate(ten, ten, twenty, twenty);
+    Text gty = typer.evaluate(bwGeom);
+    assertEquals("ST_LINESTRING", gty.toString());
+    Text wkt = new Text("linestring (10 10, 20 20)");
+    bwGeom = stLn.evaluate(wkt);
+    gty = typer.evaluate(bwGeom);
+    assertEquals("ST_LINESTRING", gty.toString());
+    // ST_Equals is a GenericUDF, so a direct call is not available here: assertTrue(stEq.evaluate(new ST_GeomFromText().evaluate(wkt), bwGeom));
+    bwGeom = stLn.evaluate(new Text("linestring z (10 10 2, 20 20 4)"));
+    gty = typer.evaluate(bwGeom);
+    assertEquals("ST_LINESTRING", gty.toString());
+    ArrayList<DoubleWritable> xs = new ArrayList<DoubleWritable>(Arrays.asList(ten, twenty));
+    ArrayList<DoubleWritable> ys = new ArrayList<DoubleWritable>(Arrays.asList(twenty, ten));
+    bwGeom = stLn.evaluate(xs, ys);
+    gty = typer.evaluate(bwGeom);
+    assertEquals("ST_LINESTRING", gty.toString());
+    BytesWritable pt1020 = new ST_Point().evaluate(ten, twenty);
+    BytesWritable pt2010 = new ST_Point().evaluate(twenty, ten);
+    ArrayList<BytesWritable> pts = new ArrayList<BytesWritable>(Arrays.asList(pt1020, pt2010));
+    bwGeom = stLn.evaluate(pts);
+    gty = typer.evaluate(bwGeom);
+    assertEquals("ST_LINESTRING", gty.toString());
+  }
+
+}
+
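The ST_Equals check is commented out above because ST_Equals appears to be a GenericUDF rather than a simple UDF, so there is no two-argument evaluate(BytesWritable, BytesWritable) to call directly from the test. A hedged sketch of how that assertion might be expressed through Hive's GenericUDF contract, inside the same test method; the binary ObjectInspectors and the type of the returned writable are assumptions:

    // Sketch only (not part of the commit): the ST_Equals assertion via the GenericUDF contract.
    // Assumed imports:
    //   org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject
    //   org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject
    //   org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
    //   org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
    ObjectInspector binOI = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    ST_Equals stEq = new ST_Equals();
    stEq.initialize(new ObjectInspector[] { binOI, binOI });
    Object eq = stEq.evaluate(new DeferredObject[] {
        new DeferredJavaObject(new ST_GeomFromText().evaluate(wkt)),
        new DeferredJavaObject(bwGeom) });
    // If ST_Equals returns a BooleanWritable (assumption), the check would be:
    // assertTrue(((BooleanWritable) eq).get());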
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStMinX.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStMinX.java
new file mode 100644
index 00000000000..b3eaedea8e6
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStMinX.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+// select ST_MinX(ST_Point(1,2)) from onerow;
+// select ST_MinX(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow;
+// select ST_MinX(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow;
+// select ST_MinX(ST_MultiPoint(0,0, 2,2)) from onerow;
+// select ST_MinX(ST_MultiLineString(array(1, 1, 2, 2), array(10, 10, 20, 20))) from onerow;
+// select ST_MinX(ST_MultiPolygon(array(1,1, 1,2, 2,2, 2,1), array(3,3, 3,4, 4,4, 4,3))) from onerow;
+
+public class TestStMinX {
+
+  @Test
+  public void TestStMinX() {
+    ST_MinX stMinX = new ST_MinX();
+    ST_Point stPt = new ST_Point();
+    BytesWritable bwGeom = stPt.evaluate(new DoubleWritable(1.2), new DoubleWritable(3.4));
+    DoubleWritable dwx = stMinX.evaluate(bwGeom);
+    assertEquals(1.2, dwx.get(), .000001);
+    bwGeom = stPt.evaluate(new DoubleWritable(6.5), new DoubleWritable(4.3), new DoubleWritable(2.1));
+    dwx = stMinX.evaluate(bwGeom);
+    assertEquals(6.5, dwx.get(), 0.0);
+  }
+
+}
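The query comments above also list linestring and polygon inputs that the Java test does not cover. A sketch, in the same style, of the linestring case using the four-coordinate ST_LineString overload exercised in TestStLineString; the expected minimum of 1.5 follows directly from the coordinates in the query above:

    // Sketch only (not part of the commit): min X of a two-point linestring,
    // matching ST_MinX(ST_LineString(1.5,2.5, 3.0,2.2)) from the comments above.
    ST_MinX stMinX = new ST_MinX();
    ST_LineString stLn = new ST_LineString();
    BytesWritable bwLine = stLn.evaluate(new DoubleWritable(1.5), new DoubleWritable(2.5),
        new DoubleWritable(3.0), new DoubleWritable(2.2));
    assertEquals(1.5, stMinX.evaluate(bwLine).get(), .000001);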
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStMinY.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStMinY.java
new file mode 100644
index 00000000000..cd0e1b2cb0a
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/esri/TestStMinY.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.esri;
+
+import org.junit.Test;
+
+// select ST_MinY(ST_GeomFromGeoJson('{"type":"LineString", "coordinates":[[1,2], [3,4]]}')) from onerow;
+// select ST_MinY(ST_Point(1,2)) from onerow;
+// select ST_MinY(ST_LineString(1.5,2.5, 3.0,2.2)) from onerow;
+// select ST_MinY(ST_Polygon(1, 1, 1, 4, 4, 4, 4, 1)) from onerow;
+// select ST_MinY(ST_MultiPoint(0,0, 2,2)) from onerow;
... 1730 lines suppressed ...