Posted to commits@drill.apache.org by su...@apache.org on 2016/04/20 07:32:35 UTC

[3/3] drill git commit: DRILL-4459: Resolve SchemaChangeException while querying hive json table

DRILL-4459: Resolve SchemaChangeException while querying hive json table

- Replace the Drill Var16Char datatype with VarChar for the Hive STRING datatype
- Change testGenericUDF() and testUDF() to use VarChar instead of Var16Char
- Add a unit test for the Hive GET_JSON_OBJECT UDF

closes #431
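
For reference, a minimal sketch of the kind of query the new test exercises (assuming a
BaseTestQuery-derived test class and the hive.simple_json table created by
HiveTestDataGenerator below). Queries of this shape previously hit SchemaChangeException;
the fix maps Hive STRING to Drill VarChar (UTF-8) instead of Var16Char (UTF-16), as the
diffs below show.

  // hedged sketch; test(String) is BaseTestQuery's query helper
  test("select convert_from(json, 'json') as json from hive.simple_json " +
      "where GET_JSON_OBJECT(simple_json.json, '$.employee_id') like 'Emp2'");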


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/3b056db0
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/3b056db0
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/3b056db0

Branch: refs/heads/master
Commit: 3b056db0f504d50fe11a6028b1a633ec74d478d2
Parents: 852b01a
Author: Vitalii Diravka <vi...@gmail.com>
Authored: Thu Mar 10 14:52:28 2016 +0000
Committer: Sudheesh Katkam <sk...@maprtech.com>
Committed: Tue Apr 19 21:58:16 2016 -0700

----------------------------------------------------------------------
 .../core/src/main/codegen/data/HiveTypes.tdd    |  4 +-
 .../templates/ObjectInspectorHelper.java        | 26 +++++++---
 .../codegen/templates/ObjectInspectors.java     | 20 ++++----
 .../drill/exec/expr/fn/HiveFuncHolder.java      |  5 +-
 .../exec/expr/fn/HiveFunctionRegistry.java      | 12 ++---
 .../apache/drill/exec/fn/hive/TestHiveUDFs.java | 51 ++++++++++----------
 .../drill/exec/fn/hive/TestInbuiltHiveUDFs.java | 14 ++++++
 .../exec/hive/TestInfoSchemaOnHiveStorage.java  |  1 +
 .../exec/store/hive/HiveTestDataGenerator.java  |  6 +++
 .../resources/functions/hive/GenericUDF.json    |  8 +--
 .../src/test/resources/functions/hive/UDF.json  |  2 +-
 .../core/src/test/resources/simple.json         | 19 ++++++++
 12 files changed, 110 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd b/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
index 73752a3..e13dc36 100644
--- a/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
+++ b/contrib/storage-hive/core/src/main/codegen/data/HiveTypes.tdd
@@ -80,10 +80,10 @@
       needOIForDrillType: false
     },
     {
-      hiveType: "STRING",l
+      hiveType: "STRING",
       hiveOI: "StringObjectInspector",
       javaType: "",
-      drillType: "Var16Char",
+      drillType: "VarChar",
       needOIForDrillType: true
     },
     {

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
index da83c40..d068868 100644
--- a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
+++ b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectorHelper.java
@@ -40,30 +40,40 @@ import java.lang.UnsupportedOperationException;
 import java.sql.Timestamp;
 import java.util.HashMap;
 import java.util.Map;
+import com.google.common.collect.Multimap;
+import com.google.common.collect.ArrayListMultimap;
 
 public class ObjectInspectorHelper {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ObjectInspectorHelper.class);
 
-  private static Map<MinorType, Class> OIMAP_REQUIRED = new HashMap<>();
-  private static Map<MinorType, Class> OIMAP_OPTIONAL = new HashMap<>();
+  private static Multimap<MinorType, Class> OIMAP_REQUIRED = ArrayListMultimap.create();
+  private static Multimap<MinorType, Class> OIMAP_OPTIONAL = ArrayListMultimap.create();
   static {
 <#list drillOI.map as entry>
     <#if entry.needOIForDrillType == true>
-    OIMAP_REQUIRED.put(MinorType.${entry.drillType?upper_case}, Drill${entry.drillType}ObjectInspector.Required.class);
-    OIMAP_OPTIONAL.put(MinorType.${entry.drillType?upper_case}, Drill${entry.drillType}ObjectInspector.Optional.class);
+    OIMAP_REQUIRED.put(MinorType.${entry.drillType?upper_case}, Drill${entry.drillType}${entry.hiveOI}.Required.class);
+    OIMAP_OPTIONAL.put(MinorType.${entry.drillType?upper_case}, Drill${entry.drillType}${entry.hiveOI}.Optional.class);
     </#if>
 </#list>
   }
 
-  public static ObjectInspector getDrillObjectInspector(DataMode mode, MinorType minorType) {
+  public static ObjectInspector getDrillObjectInspector(DataMode mode, MinorType minorType, boolean varCharToStringReplacement) {
     try {
       if (mode == DataMode.REQUIRED) {
         if (OIMAP_REQUIRED.containsKey(minorType)) {
-          return (ObjectInspector) OIMAP_REQUIRED.get(minorType).newInstance();
+          if (varCharToStringReplacement && minorType == MinorType.VARCHAR) {
+            return (ObjectInspector) ((Class) OIMAP_REQUIRED.get(minorType).toArray()[1]).newInstance();
+          } else {
+            return (ObjectInspector) ((Class) OIMAP_REQUIRED.get(minorType).toArray()[0]).newInstance();
+          }
         }
       } else if (mode == DataMode.OPTIONAL) {
         if (OIMAP_OPTIONAL.containsKey(minorType)) {
-          return (ObjectInspector) OIMAP_OPTIONAL.get(minorType).newInstance();
+          if (varCharToStringReplacement && minorType == MinorType.VARCHAR) {
+            return (ObjectInspector) ((Class) OIMAP_OPTIONAL.get(minorType).toArray()[1]).newInstance();
+          } else {
+            return (ObjectInspector) ((Class) OIMAP_OPTIONAL.get(minorType).toArray()[0]).newInstance();
+          }
         }
       } else {
          throw new UnsupportedOperationException("Repeated types are not supported as argument to Hive UDFs");
@@ -191,7 +201,7 @@ public class ObjectInspectorHelper {
           <#elseif entry.hiveType == "STRING">
             JVar data = jc._else().decl(m.directClass(byte[].class.getCanonicalName()), "data",
               castedOI.invoke("getPrimitiveJavaObject").arg(returnValue)
-                      .invoke("getBytes").arg(DirectExpression.direct("com.google.common.base.Charsets.UTF_16")));
+                      .invoke("getBytes"));
             jc._else().add(returnValueHolder.ref("buffer")
               .invoke("setBytes").arg(JExpr.lit(0)).arg(data));
             jc._else().assign(returnValueHolder.ref("start"), JExpr.lit(0));

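A hedged usage sketch of the new helper signature (a fragment for illustration, not part of
the commit; it assumes the generated ObjectInspectorHelper class and Drill's TypeProtos
DataMode/MinorType enums are available): for a Drill VARCHAR argument, passing true selects
the Hive STRING-backed inspector and passing false selects the HiveVarchar-backed one.

  // Drill VARCHAR presented to the Hive UDF as a Hive STRING value
  ObjectInspector asString =
      ObjectInspectorHelper.getDrillObjectInspector(DataMode.OPTIONAL, MinorType.VARCHAR, true);
  // Drill VARCHAR presented as a Hive VARCHAR value
  ObjectInspector asVarchar =
      ObjectInspectorHelper.getDrillObjectInspector(DataMode.OPTIONAL, MinorType.VARCHAR, false);
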
http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
index 5c696f9..ffd3a56 100644
--- a/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
+++ b/contrib/storage-hive/core/src/main/codegen/templates/ObjectInspectors.java
@@ -19,7 +19,7 @@
 
 <#list drillOI.map as entry>
 <#if entry.needOIForDrillType == true>
-<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/hive/Drill${entry.drillType}ObjectInspector.java" />
+<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/hive/Drill${entry.drillType}${entry.hiveOI}.java" />
 
 <#include "/@includes/license.ftl" />
 
@@ -47,7 +47,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 
-public class Drill${entry.drillType}ObjectInspector {
+public class Drill${entry.drillType}${entry.hiveOI} {
 <#assign seq = ["Required", "Optional"]>
 <#list seq as mode>
 
@@ -56,7 +56,7 @@ public class Drill${entry.drillType}ObjectInspector {
       super(TypeInfoFactory.${entry.hiveType?lower_case}TypeInfo);
     }
 
-<#if entry.drillType == "VarChar">
+<#if entry.drillType == "VarChar" && entry.hiveType == "VARCHAR">
     @Override
     public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
     <#if mode == "Optional">
@@ -85,18 +85,18 @@ public class Drill${entry.drillType}ObjectInspector {
       final String s = StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer);
       return new HiveVarchar(s, HiveVarchar.MAX_VARCHAR_LENGTH);
     }
-<#elseif entry.drillType == "Var16Char">
+<#elseif entry.drillType == "VarChar" && entry.hiveType == "STRING">
     @Override
     public Text getPrimitiveWritableObject(Object o) {
     <#if mode == "Optional">
       if (o == null) {
         return null;
       }
-      final NullableVar16CharHolder h = (NullableVar16CharHolder)o;
+      final NullableVarCharHolder h = (NullableVarCharHolder)o;
     <#else>
-      final Var16CharHolder h = (Var16CharHolder)o;
+      final VarCharHolder h = (VarCharHolder)o;
     </#if>
-      return new Text(StringFunctionHelpers.toStringFromUTF16(h.start, h.end, h.buffer));
+      return new Text(StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer));
     }
 
     @Override
@@ -105,11 +105,11 @@ public class Drill${entry.drillType}ObjectInspector {
       if (o == null) {
         return null;
       }
-      final NullableVar16CharHolder h = (NullableVar16CharHolder)o;
+      final NullableVarCharHolder h = (NullableVarCharHolder)o;
     <#else>
-      final Var16CharHolder h = (Var16CharHolder)o;
+      final VarCharHolder h = (VarCharHolder)o;
     </#if>
-      return StringFunctionHelpers.toStringFromUTF16(h.start, h.end, h.buffer);
+      return StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer);
     }
 <#elseif entry.drillType == "VarBinary">
     @Override

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFuncHolder.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFuncHolder.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFuncHolder.java
index a6e76b0..0a3cf18 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFuncHolder.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFuncHolder.java
@@ -45,6 +45,7 @@ import com.sun.codemodel.JExpr;
 import com.sun.codemodel.JInvocation;
 import com.sun.codemodel.JTryBlock;
 import com.sun.codemodel.JVar;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 
 public class HiveFuncHolder extends AbstractFuncHolder {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FunctionImplementationRegistry.class);
@@ -188,7 +189,9 @@ public class HiveFuncHolder extends AbstractFuncHolder {
         oiArray.component(JExpr.lit(i)),
         oih.staticInvoke("getDrillObjectInspector")
           .arg(mode.staticInvoke("valueOf").arg(JExpr.lit(argTypes[i].getMode().getNumber())))
-          .arg(mt.staticInvoke("valueOf").arg(JExpr.lit(argTypes[i].getMinorType().getNumber()))));
+          .arg(mt.staticInvoke("valueOf").arg(JExpr.lit(argTypes[i].getMinorType().getNumber())))
+          .arg((((PrimitiveObjectInspector) returnOI).getPrimitiveCategory() ==
+              PrimitiveObjectInspector.PrimitiveCategory.STRING) ? JExpr.lit(true) : JExpr.lit(false)));
     }
 
     // declare and instantiate DeferredObject array

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFunctionRegistry.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFunctionRegistry.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFunctionRegistry.java
index 5e74f6f..c716e9e 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFunctionRegistry.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/expr/fn/HiveFunctionRegistry.java
@@ -126,23 +126,21 @@ public class HiveFunctionRegistry implements PluggableFunctionRegistry{
 
   /**
    * Helper method which resolves the given function call to a Hive UDF. It takes an argument
-   * <i>convertVarCharToVar16Char</i> which tells to implicitly cast input arguments of type VARCHAR to VAR16CHAR
-   * and search Hive UDF registry using implicitly casted argument types.
+   * <i>varCharToStringReplacement</i> which indicates whether to use the Hive STRING (true) or VARCHAR (false) type
+   * for the Drill VARCHAR type and to search the Hive UDF registry using this replacement.
    *
    * TODO: This is a rudimentary function resolver. Need to include more implicit casting such as DECIMAL28 to
    * DECIMAL38 as Hive UDFs can accept only DECIMAL38 type.
    */
-  private HiveFuncHolder resolveFunction(FunctionCall call, boolean convertVarCharToVar16Char) {
+  private HiveFuncHolder resolveFunction(FunctionCall call, boolean varCharToStringReplacement) {
     HiveFuncHolder holder;
     MajorType[] argTypes = new MajorType[call.args.size()];
     ObjectInspector[] argOIs = new ObjectInspector[call.args.size()];
     for (int i=0; i<call.args.size(); i++) {
       try {
         argTypes[i] = call.args.get(i).getMajorType();
-        if (convertVarCharToVar16Char && argTypes[i].getMinorType() == MinorType.VARCHAR) {
-          argTypes[i] = Types.withMode(MinorType.VAR16CHAR, argTypes[i].getMode());
-        }
-        argOIs[i] = ObjectInspectorHelper.getDrillObjectInspector(argTypes[i].getMode(), argTypes[i].getMinorType());
+        argOIs[i] = ObjectInspectorHelper.getDrillObjectInspector(argTypes[i].getMode(), argTypes[i].getMinorType(),
+            varCharToStringReplacement);
       } catch(Exception e) {
         // Hive throws errors if there are unsupported types. Consider there is no hive UDF supporting the
         // given argument types

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestHiveUDFs.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestHiveUDFs.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestHiveUDFs.java
index 3ce9a6d..2ad8edc 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestHiveUDFs.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestHiveUDFs.java
@@ -25,11 +25,12 @@ import java.util.List;
 import org.apache.drill.BaseTestQuery;
 import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
+import org.apache.drill.exec.vector.BigIntVector;
 import org.apache.drill.exec.vector.Float4Vector;
+import org.apache.drill.exec.vector.IntVector;
 import org.apache.drill.exec.vector.NullableFloat8Vector;
-import org.apache.drill.exec.vector.NullableIntVector;
-import org.apache.drill.exec.vector.NullableVar16CharVector;
-import org.apache.drill.exec.vector.Var16CharVector;
+import org.apache.drill.exec.vector.NullableVarCharVector;
+import org.apache.drill.exec.vector.VarCharVector;
 import org.junit.Test;
 
 import com.google.common.base.Charsets;
@@ -53,42 +54,42 @@ public class TestHiveUDFs extends BaseTestQuery {
         continue;
       }
       // Output columns and types
-      //  1. str1 : Var16Char
-      //  2. upperStr1 : NullableVar16Char
+      //  1. str1 : VarChar
+      //  2. upperStr1 : NullableVarChar
       //  3. concat : NullableVarChar
       //  4. flt1 : Float4
       //  5. format_number : NullableFloat8
-      //  6. nullableStr1 : NullableVar16Char
-      //  7. upperNullableStr1 : NullableVar16Char
-      Var16CharVector str1V = (Var16CharVector) batchLoader.getValueAccessorById(Var16CharVector.class, 0).getValueVector();
-      NullableVar16CharVector upperStr1V = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 1).getValueVector();
-      NullableVar16CharVector concatV = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 2).getValueVector();
+      //  6. nullableStr1 : NullableVarChar
+      //  7. upperNullableStr1 : NullableVarChar
+      VarCharVector str1V = (VarCharVector) batchLoader.getValueAccessorById(VarCharVector.class, 0).getValueVector();
+      NullableVarCharVector upperStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 1).getValueVector();
+      NullableVarCharVector concatV = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 2).getValueVector();
       Float4Vector flt1V = (Float4Vector) batchLoader.getValueAccessorById(Float4Vector.class, 3).getValueVector();
-      NullableVar16CharVector format_numberV = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 4).getValueVector();
-      NullableVar16CharVector nullableStr1V = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 5).getValueVector();
-      NullableVar16CharVector upperNullableStr1V = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 6).getValueVector();
+      NullableVarCharVector format_numberV = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 4).getValueVector();
+      NullableVarCharVector nullableStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 5).getValueVector();
+      NullableVarCharVector upperNullableStr1V = (NullableVarCharVector) batchLoader.getValueAccessorById(NullableVarCharVector.class, 6).getValueVector();
 
       for (int i=0; i<batchLoader.getRecordCount(); i++) {
-        String in = new String(str1V.getAccessor().get(i), Charsets.UTF_16);
-        String upper = new String(upperStr1V.getAccessor().get(i), Charsets.UTF_16);
+        String in = new String(str1V.getAccessor().get(i), Charsets.UTF_8);
+        String upper = new String(upperStr1V.getAccessor().get(i), Charsets.UTF_8);
         assertTrue(in.toUpperCase().equals(upper));
 
 
-        String concat = new String(concatV.getAccessor().get(i), Charsets.UTF_16);
+        String concat = new String(concatV.getAccessor().get(i), Charsets.UTF_8);
         assertTrue(concat.equals(in+"-"+in));
 
         float flt1 = flt1V.getAccessor().get(i);
-        String format_number = new String(format_numberV.getAccessor().get(i), Charsets.UTF_16);
+        String format_number = new String(format_numberV.getAccessor().get(i), Charsets.UTF_8);
 
 
         String nullableStr1 = null;
         if (!nullableStr1V.getAccessor().isNull(i)) {
-          nullableStr1 = new String(nullableStr1V.getAccessor().get(i), Charsets.UTF_16);
+          nullableStr1 = new String(nullableStr1V.getAccessor().get(i), Charsets.UTF_8);
         }
 
         String upperNullableStr1 = null;
         if (!upperNullableStr1V.getAccessor().isNull(i)) {
-          upperNullableStr1 = new String(upperNullableStr1V.getAccessor().get(i), Charsets.UTF_16);
+          upperNullableStr1 = new String(upperNullableStr1V.getAccessor().get(i), Charsets.UTF_8);
         }
 
         assertEquals(nullableStr1 != null, upperNullableStr1 != null);
@@ -125,20 +126,20 @@ public class TestHiveUDFs extends BaseTestQuery {
       }
 
       // Output columns and types
-      // 1. str1 : Var16Char
+      // 1. str1 : VarChar
       // 2. str1Length : Int
       // 3. str1Ascii : Int
       // 4. flt1 : Float4
       // 5. pow : Float8
-      Var16CharVector str1V = (Var16CharVector) batchLoader.getValueAccessorById(Var16CharVector.class, 0).getValueVector();
-      NullableIntVector str1LengthV = (NullableIntVector) batchLoader.getValueAccessorById(NullableIntVector.class, 1).getValueVector();
-      NullableIntVector str1AsciiV = (NullableIntVector) batchLoader.getValueAccessorById(NullableIntVector.class, 2).getValueVector();
+      VarCharVector str1V = (VarCharVector) batchLoader.getValueAccessorById(VarCharVector.class, 0).getValueVector();
+      BigIntVector str1LengthV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
+      IntVector str1AsciiV = (IntVector) batchLoader.getValueAccessorById(IntVector.class, 2).getValueVector();
       Float4Vector flt1V = (Float4Vector) batchLoader.getValueAccessorById(Float4Vector.class, 3).getValueVector();
       NullableFloat8Vector powV = (NullableFloat8Vector) batchLoader.getValueAccessorById(NullableFloat8Vector.class, 4).getValueVector();
 
       for (int i=0; i<batchLoader.getRecordCount(); i++) {
-        String str1 = new String(str1V.getAccessor().get(i), Charsets.UTF_16);
-        int str1Length = str1LengthV.getAccessor().get(i);
+        String str1 = new String(str1V.getAccessor().get(i), Charsets.UTF_8);
+        long str1Length = str1LengthV.getAccessor().get(i);
         assertTrue(str1.length() == str1Length);
 
         int str1Ascii = str1AsciiV.getAccessor().get(i);

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java
index a126aaa..93c9374 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/fn/hive/TestInbuiltHiveUDFs.java
@@ -19,9 +19,11 @@ package org.apache.drill.exec.fn.hive;
 
 import com.google.common.collect.Lists;
 import org.apache.commons.lang3.tuple.Pair;
+import org.apache.drill.TestBuilder;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.hive.HiveTestBase;
+import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.junit.Test;
 
 import java.util.List;
@@ -70,4 +72,16 @@ public class TestInbuiltHiveUDFs extends HiveTestBase {
         .build()
         .run();
   }
+
+  @Test // DRILL-4459
+  public void testGetJsonObject() throws Exception {
+    testBuilder()
+        .sqlQuery("select convert_from(json, 'json') as json from hive.simple_json " +
+            "where GET_JSON_OBJECT(simple_json.json, '$.employee_id') like 'Emp2'")
+        .ordered()
+        .baselineColumns("json")
+        .baselineValues(TestBuilder.mapOf("employee_id","Emp2","full_name","Kamesh",
+            "first_name","Bh","last_name","Venkata","position","Store"))
+        .go();
+  }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
index 03acd22..a8c6e68 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/TestInfoSchemaOnHiveStorage.java
@@ -42,6 +42,7 @@ public class TestInfoSchemaOnHiveStorage extends HiveTestBase {
         .baselineValues("hive.default", "kv_parquet")
         .baselineValues("hive.default", "kv_sh")
         .baselineValues("hive.default", "countstar_parquet")
+        .baselineValues("hive.default", "simple_json")
         .go();
 
     testBuilder()

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
index 9c2fe1c..7a5b72d 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/store/hive/HiveTestDataGenerator.java
@@ -24,6 +24,7 @@ import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.Map;
 
+import com.google.common.io.Resources;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.drill.BaseTestQuery;
@@ -504,6 +505,11 @@ public class HiveTestDataGenerator {
     executeQuery(hiveDriver, createTableWithHeaderFooterProperties("skipper.kv_sequencefile_large", "sequencefile", "1", "1"));
     executeQuery(hiveDriver, "insert into table skipper.kv_sequencefile_large select * from skipper.kv_text_large");
 
+      // Create a table based on json file
+      executeQuery(hiveDriver, "create table default.simple_json(json string)");
+      final String loadData = String.format("load data local inpath '" +
+          Resources.getResource("simple.json") + "' into table default.simple_json");
+      executeQuery(hiveDriver, loadData);
     ss.close();
   }
 

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/test/resources/functions/hive/GenericUDF.json
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/resources/functions/hive/GenericUDF.json b/contrib/storage-hive/core/src/test/resources/functions/hive/GenericUDF.json
index f5ae299..5dc076e 100644
--- a/contrib/storage-hive/core/src/test/resources/functions/hive/GenericUDF.json
+++ b/contrib/storage-hive/core/src/test/resources/functions/hive/GenericUDF.json
@@ -13,10 +13,10 @@
             url: "http://apache.org",
             entries:[
                 {records: 100, types: [
-                   {name: "str1", type: "VAR16CHAR", mode: "REQUIRED"},
-                   {name: "str2", type: "VAR16CHAR", mode: "REQUIRED"},
-                   {name: "str3", type: "VAR16CHAR", mode: "REQUIRED"},
-                   {name: "nullableStr1", type: "VAR16CHAR", mode: "OPTIONAL"},
+                   {name: "str1", type: "VARCHAR", mode: "REQUIRED"},
+                   {name: "str2", type: "VARCHAR", mode: "REQUIRED"},
+                   {name: "str3", type: "VARCHAR", mode: "REQUIRED"},
+                   {name: "nullableStr1", type: "VARCHAR", mode: "OPTIONAL"},
                    {name: "flt1", type: "FLOAT4", mode: "REQUIRED"}
                 ]}
             ]

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/test/resources/functions/hive/UDF.json
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/resources/functions/hive/UDF.json b/contrib/storage-hive/core/src/test/resources/functions/hive/UDF.json
index c2c7169..fecabec 100644
--- a/contrib/storage-hive/core/src/test/resources/functions/hive/UDF.json
+++ b/contrib/storage-hive/core/src/test/resources/functions/hive/UDF.json
@@ -13,7 +13,7 @@
             url: "http://apache.org",
             entries:[
                 {records: 100, types: [
-                   {name: "str1", type: "VAR16CHAR", mode: "REQUIRED"},
+                   {name: "str1", type: "VARCHAR", mode: "REQUIRED"},
                    {name: "flt1", type: "FLOAT4", mode: "REQUIRED"}
                 ]}
             ]

http://git-wip-us.apache.org/repos/asf/drill/blob/3b056db0/contrib/storage-hive/core/src/test/resources/simple.json
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/test/resources/simple.json b/contrib/storage-hive/core/src/test/resources/simple.json
new file mode 100644
index 0000000..0bf69ce
--- /dev/null
+++ b/contrib/storage-hive/core/src/test/resources/simple.json
@@ -0,0 +1,19 @@
+[ {"employee_id":"Emp1101","full_name":"Steve Eurich","first_name":"Steve","last_name":"Eurich","position":"Store T"},
+  {"employee_id":"Emp1102","full_name":"Mary Pierson","first_name":"Mary","last_name":"Pierson","position":"Store T"},
+  {"employee_id":"Emp1103","full_name":"Leo Jones","first_name":"Leo","last_name":"Jones","position":"Store Tem"},
+  {"employee_id":"Emp1104","full_name":"Nancy Beatty","first_name":"Nancy","last_name":"Beatty","position":"Store T"},
+  {"employee_id":"Emp1105","full_name":"Clara McNight","first_name":"Clara","last_name":"McNight","position":"Store"},
+  {"employee_id":"Emp1106","first_name":"Marcella","last_name":"Isaacs","position_id":17,"position":"Stor"},
+  {"employee_id":"Emp1107","full_name":"Charlotte Yonce","first_name":"Charlotte","last_name":"Yonce","position":"Stor"},
+  {"employee_id":"Emp1108","full_name":"Benjamin Foster","first_name":"Benjamin","last_name":"Foster","position":"Stor"},
+  {"employee_id":"Emp1109","full_name":"John Reed","first_name":"John","last_name":"Reed","position":"Store Per"},
+  {"employee_id":"Emp1110","full_name":"Lynn Kwiatkowski","first_name":"Lynn","last_name":"Kwiatkowski","position":"St"},
+  {"employee_id":"Emp1111","full_name":"Donald Vann","first_name":"Donald","last_name":"Vann","position":"Store Per"},
+  {"employee_id":"Emp1112","first_name":"William","last_name":"Smith","position":"St"},
+  {"employee_id":"Emp1113","full_name":"Amy Hensley","first_name":"Amy","last_name":"Hensley","position":"Store Pe"},
+  {"employee_id":"Emp1114","full_name":"Judy Owens","first_name":"Judy","last_name":"Owens","position":"Store Per"},
+  {"employee_id":"Emp1115","full_name":"Frederick Castillo","first_name":"Frederick","last_name":"Castillo","position":"S"},
+  {"employee_id":"Emp1116","full_name":"Phil Munoz","first_name":"Phil","last_name":"Munoz","position":"Store Per"},
+  {"employee_id":"Emp1117","full_name":"Lori Lightfoot","first_name":"Lori","last_name":"Lightfoot","position":"Store"},
+  {"employee_id":"Emp1","full_name":"Kumar","first_name":"Anil","last_name":"B","position":"Store"},
+  {"employee_id":"Emp2","full_name":"Kamesh","first_name":"Bh","last_name":"Venkata","position":"Store"} ]