Posted to commits@hive.apache.org by gs...@apache.org on 2023/01/30 22:18:15 UTC

[hive] branch master updated: HIVE-26774 - Implement array_slice UDF to get the subset of elements from an array (subarray) (#3893)(Taraka Rama Rao Lethavadla, reviewed by Sai Hemanth, Sourabh Badhya)

This is an automated email from the ASF dual-hosted git repository.

gsaihemanth pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new fcf80448513 HIVE-26774 - Implement array_slice UDF to get the subset of elements from an array (subarray) (#3893)(Taraka Rama Rao Lethavadla, reviewed by Sai Hemanth, Sourabh Badhya)
fcf80448513 is described below

commit fcf80448513f17c48cfac5ff5cf613e93dd313f2
Author: tarak271 <ta...@gmail.com>
AuthorDate: Tue Jan 31 03:47:50 2023 +0530

    HIVE-26774 - Implement array_slice UDF to get the subset of elements from an array (subarray) (#3893)(Taraka Rama Rao Lethavadla, reviewed by Sai Hemanth, Sourabh Badhya)
---
 .../hadoop/hive/ql/exec/FunctionRegistry.java      |   1 +
 .../udf/generic/AbstractGenericUDFArrayBase.java   |  17 +++
 .../hive/ql/udf/generic/GenericUDFArraySlice.java  |  73 +++++++++++
 .../ql/udf/generic/TestGenericUDFArraySlice.java   | 142 +++++++++++++++++++++
 .../test/queries/clientpositive/udf_array_slice.q  |  38 ++++++
 .../clientpositive/llap/show_functions.q.out       |   3 +
 .../clientpositive/llap/udf_array_slice.q.out      | 112 ++++++++++++++++
 7 files changed, 386 insertions(+)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index d2deb5d82ff..3e633595fc5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -603,6 +603,7 @@ public final class FunctionRegistry {
     system.registerGenericUDF("array_min", GenericUDFArrayMin.class);
     system.registerGenericUDF("array_max", GenericUDFArrayMax.class);
     system.registerGenericUDF("array_distinct", GenericUDFArrayDistinct.class);
+    system.registerGenericUDF("array_slice", GenericUDFArraySlice.class);
     system.registerGenericUDF("deserialize", GenericUDFDeserialize.class);
     system.registerGenericUDF("sentences", GenericUDFSentences.class);
     system.registerGenericUDF("map_keys", GenericUDFMapKeys.class);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFArrayBase.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFArrayBase.java
index d954cbb1e3b..22ad5b8aa6e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFArrayBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFArrayBase.java
@@ -19,8 +19,10 @@ package org.apache.hadoop.hive.ql.udf.generic;
 
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -89,6 +91,21 @@ public abstract class AbstractGenericUDFArrayBase extends GenericUDF {
         }
     }
 
+    void checkArgIntPrimitiveCategory(PrimitiveObjectInspector objectInspector, String functionName, int idx)
+        throws UDFArgumentTypeException {
+      switch (objectInspector.getPrimitiveCategory()) {
+      case SHORT:
+      case INT:
+      case LONG:
+        break;
+      default:
+        throw new UDFArgumentTypeException(idx,
+            "Argument " + idx + " of function " + functionName + " must be \"" + serdeConstants.SMALLINT_TYPE_NAME + "\""
+                + " or \"" + serdeConstants.INT_TYPE_NAME + "\"" + " or \"" + serdeConstants.BIGINT_TYPE_NAME
+                + "\", but \"" + objectInspector.getTypeName() + "\" was found.");
+      }
+    }
+
     ObjectInspector initOI(ObjectInspector[] arguments) {
 
         GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver =
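
The new checkArgIntPrimitiveCategory helper centralizes the validation that
slice-style array UDFs need for their positional arguments: smallint, int and
bigint are accepted, and anything else is rejected with a
UDFArgumentTypeException naming the offending argument. A minimal sketch of
how a subclass is expected to call it from initialize() (the function name
"MY_ARRAY_FUNC" is illustrative; GenericUDFArraySlice below does exactly this
for its start and length arguments):

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
      // The base class validates arity and that argument 0 is an array.
      ObjectInspector oi = super.initialize(arguments);
      // Reject anything other than smallint/int/bigint for argument 1.
      checkArgIntPrimitiveCategory((PrimitiveObjectInspector) arguments[1], "MY_ARRAY_FUNC", 1);
      return oi;
    }
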
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArraySlice.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArraySlice.java
new file mode 100644
index 00000000000..9bdc3fc03d4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArraySlice.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * GenericUDFArraySlice.
+ */
+@Description(name = "array_slice", value = "_FUNC_(array, start, length) - Returns the subset or range of elements from"
+    + " an array (subarray).", extended = "Example:\n" + "  > SELECT _FUNC_(array(1, 2, 3,4), 2,2) FROM src LIMIT 1;\n"
+    + "  3,4")
+public class GenericUDFArraySlice extends AbstractGenericUDFArrayBase {
+  private static final String FUNC_NAME = "ARRAY_SLICE";
+  private static final int START_IDX = 1;
+  private static final int LENGTH_IDX = 2;
+
+  public GenericUDFArraySlice() {
+    super(FUNC_NAME, 3, 3, ObjectInspector.Category.LIST);
+  }
+
+  @Override public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    ObjectInspector defaultOI = super.initialize(arguments);
+    // Check whether start and length inputs are of integer type
+    checkArgIntPrimitiveCategory((PrimitiveObjectInspector) arguments[START_IDX], FUNC_NAME, START_IDX);
+    checkArgIntPrimitiveCategory((PrimitiveObjectInspector) arguments[LENGTH_IDX], FUNC_NAME, LENGTH_IDX);
+    return defaultOI;
+  }
+
+  @Override public Object evaluate(DeferredObject[] arguments) throws HiveException {
+
+    Object array = arguments[ARRAY_IDX].get();
+    if (arrayOI.getListLength(array) == 0) {
+      return Collections.emptyList();
+    } else if (arrayOI.getListLength(array) < 0) {
+      return null;
+    }
+
+    List<?> retArray = ((ListObjectInspector) argumentOIs[ARRAY_IDX]).getList(array);
+    int start = ((IntObjectInspector) argumentOIs[START_IDX]).get(arguments[START_IDX].get());
+    int length = ((IntObjectInspector) argumentOIs[LENGTH_IDX]).get(arguments[LENGTH_IDX].get());
+    // return empty list if start/length are out of range of the array
+    if (start + length > retArray.size()) {
+      return Collections.emptyList();
+    }
+    return retArray.subList(start, start + length).stream().map(o -> converter.convert(o)).collect(Collectors.toList());
+  }
+}
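
Note that start is zero-based and the returned window is the half-open range
[start, start + length): with arguments (2, 2) on array(1, 2, 3, null, 3, 4)
the result is [3, null], as the q.out file below confirms. A minimal sketch of
the same slicing contract in plain Java, stripped of Hive's ObjectInspector
plumbing (class and method names are illustrative only):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class ArraySliceDemo {
      // Same contract as evaluate() above: null array -> null, empty input or
      // out-of-range slice -> [], otherwise the window [start, start + length).
      static <T> List<T> slice(List<T> arr, int start, int length) {
        if (arr == null) {
          return null;
        }
        if (arr.isEmpty() || start + length > arr.size()) {
          return Collections.emptyList();
        }
        return arr.subList(start, start + length);
      }

      public static void main(String[] args) {
        // Zero-based start: a (2, 2) slice skips the first two elements.
        System.out.println(slice(Arrays.asList(1, 2, 3, null, 3, 4), 2, 2)); // [3, null]
      }
    }
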
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArraySlice.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArraySlice.java
new file mode 100644
index 00000000000..43d84bf62d9
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFArraySlice.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static java.util.Arrays.asList;
+
+public class TestGenericUDFArraySlice {
+  private final GenericUDFArraySlice udf = new GenericUDFArraySlice();
+
+  @Test public void testPrimitive() throws HiveException {
+    ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector),
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
+    udf.initialize(inputOIs);
+
+    Object i1 = new IntWritable(3);
+    Object i2 = new IntWritable(1);
+    Object i3 = new IntWritable(2);
+    Object i4 = new IntWritable(1);
+
+    runAndVerify(asList(i1, i2, i3, i4), 2, 2, asList(i3, i4));
+    i1 = new FloatWritable(3.3f);
+    i2 = new FloatWritable(1.1f);
+    i3 = new FloatWritable(3.3f);
+    i4 = new FloatWritable(2.20f);
+    runAndVerify(asList(i1, i2, i3, i4), 1, 3, asList(i2, i3, i4));
+  }
+
+  @Test public void testList() throws HiveException {
+    ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
+        ObjectInspectorFactory.getStandardListObjectInspector(
+            PrimitiveObjectInspectorFactory.writableStringObjectInspector)),
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
+    udf.initialize(inputOIs);
+
+    Object i1 = asList(new Text("aa1"), new Text("dd"), new Text("cc"), new Text("bb"));
+    Object i2 = asList(new Text("aa2"), new Text("cc"), new Text("ba"), new Text("dd"));
+    Object i3 = asList(new Text("aa3"), new Text("cc"), new Text("dd"), new Text("ee"), new Text("bb"));
+    Object i4 = asList(new Text("aa4"), new Text("cc"), new Text("ddd"), new Text("bb"));
+    runAndVerify(asList(i1, i2, i2, i3, i4, i4), 1, 4, asList(i2, i2, i3, i4));
+  }
+
+  @Test public void testStruct() throws HiveException {
+    ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
+        ObjectInspectorFactory.getStandardStructObjectInspector(asList("f1", "f2", "f3", "f4"),
+            asList(PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+                PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
+                PrimitiveObjectInspectorFactory.writableDateObjectInspector,
+                ObjectInspectorFactory.getStandardListObjectInspector(
+                    PrimitiveObjectInspectorFactory.writableIntObjectInspector)))),
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
+    udf.initialize(inputOIs);
+
+    Object i1 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritableV2(Date.of(2015, 5, 26)),
+        asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
+
+    Object i2 = asList(new Text("b"), new DoubleWritable(3.14), new DateWritableV2(Date.of(2015, 5, 26)),
+        asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
+
+    Object i3 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritableV2(Date.of(2015, 5, 25)),
+        asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(5)));
+
+    Object i4 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritableV2(Date.of(2015, 5, 25)),
+        asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
+
+    runAndVerify(asList(i1, i3, i2, i3, i4, i2), 1, 1, asList(i3));
+  }
+
+  @Test public void testMap() throws HiveException {
+    ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
+        ObjectInspectorFactory.getStandardMapObjectInspector(
+            PrimitiveObjectInspectorFactory.writableStringObjectInspector,
+            PrimitiveObjectInspectorFactory.writableIntObjectInspector)),
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
+        PrimitiveObjectInspectorFactory.writableIntObjectInspector };
+    udf.initialize(inputOIs);
+
+    Map<Text, IntWritable> m1 = new HashMap<Text, IntWritable>();
+    m1.put(new Text("a"), new IntWritable(4));
+    m1.put(new Text("b"), new IntWritable(3));
+    m1.put(new Text("c"), new IntWritable(1));
+    m1.put(new Text("d"), new IntWritable(2));
+
+    Map<Text, IntWritable> m2 = new HashMap<Text, IntWritable>();
+    m2.put(new Text("d"), new IntWritable(4));
+    m2.put(new Text("b"), new IntWritable(3));
+    m2.put(new Text("a"), new IntWritable(1));
+    m2.put(new Text("c"), new IntWritable(2));
+
+    Map<Text, IntWritable> m3 = new HashMap<Text, IntWritable>();
+    m3.put(new Text("d"), new IntWritable(4));
+    m3.put(new Text("b"), new IntWritable(3));
+    m3.put(new Text("a"), new IntWritable(1));
+
+    runAndVerify(asList(m1, m3, m2, m3, m1), 2, 1, asList(m2));
+  }
+
+  private void runAndVerify(List<Object> actual, Integer start, Integer length, List<Object> expected)
+      throws HiveException {
+    GenericUDF.DeferredJavaObject[] args = { new GenericUDF.DeferredJavaObject(actual),
+        new GenericUDF.DeferredJavaObject(start != null ? new IntWritable(start) : null),
+        new GenericUDF.DeferredJavaObject(length != null ? new IntWritable(length) : null) };
+    List<Object> result = (List<Object>) udf.evaluate(args);
+    Assert.assertArrayEquals("Check content", expected.toArray(), result.toArray());
+  }
+}
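
One behavior worth pinning down alongside the tests above is the overrun case:
when start + length exceeds the array size, evaluate() returns an empty list
rather than a truncated slice. A hypothetical extra test in the same style
(not part of this commit) could capture that:

    @Test public void testOutOfRange() throws HiveException {
      ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(
          PrimitiveObjectInspectorFactory.writableIntObjectInspector),
          PrimitiveObjectInspectorFactory.writableIntObjectInspector,
          PrimitiveObjectInspectorFactory.writableIntObjectInspector };
      udf.initialize(inputOIs);

      Object i1 = new IntWritable(1);
      Object i2 = new IntWritable(2);
      Object i3 = new IntWritable(3);
      Object i4 = new IntWritable(4);
      // start + length (2 + 3) overruns the 4-element array, so expect [].
      runAndVerify(asList(i1, i2, i3, i4), 2, 3, java.util.Collections.emptyList());
    }
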
diff --git a/ql/src/test/queries/clientpositive/udf_array_slice.q b/ql/src/test/queries/clientpositive/udf_array_slice.q
new file mode 100644
index 00000000000..d52152b16f5
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/udf_array_slice.q
@@ -0,0 +1,38 @@
+--! qt:dataset:src
+
+-- SORT_QUERY_RESULTS
+
+set hive.fetch.task.conversion=more;
+
+DESCRIBE FUNCTION array_slice;
+DESCRIBE FUNCTION EXTENDED array_slice;
+
+-- evaluates the function for an array of primitives
+SELECT array_slice(array(1, 2, 3, null,3,4),2,2) FROM src tablesample (1 rows);
+
+SELECT array_slice(array(),1,1) FROM src tablesample (1 rows);
+
+SELECT array_slice(array(null),1,1) FROM src tablesample (1 rows);
+
+SELECT array_slice(array(1.12, 2.23, 3.34, null,1.11,1.12,2.9),3,2) FROM src tablesample (1 rows);
+
+SELECT array_slice(array(1.1234567890, 2.234567890, 3.34567890, null, 3.3456789, 2.234567,1.1234567890),3,3) FROM src tablesample (1 rows);
+
+SELECT array_slice(array(11234567890, 2234567890, 334567890, null, 11234567890, 2234567890, 334567890, null),2,1) FROM src tablesample (1 rows);
+
+SELECT array_slice(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")),2,2) FROM src tablesample (1 rows);
+
+-- handle null array cases
+
+dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/test_null_array;
+
+dfs -copyFromLocal ../../data/files/test_null_array.csv ${system:test.tmp.dir}/test_null_array/;
+
+create external table test_null_array (id int, value Array<String>) ROW FORMAT DELIMITED
+ FIELDS TERMINATED BY ':' collection items terminated by ',' location '${system:test.tmp.dir}/test_null_array';
+
+select value from test_null_array;
+
+select array_slice(value,1,1) from test_null_array;
+
+dfs -rm -r ${system:test.tmp.dir}/test_null_array;
\ No newline at end of file
diff --git a/ql/src/test/results/clientpositive/llap/show_functions.q.out b/ql/src/test/results/clientpositive/llap/show_functions.q.out
index 095b3bd9b04..457ddbf2ae7 100644
--- a/ql/src/test/results/clientpositive/llap/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/llap/show_functions.q.out
@@ -50,6 +50,7 @@ array_contains
 array_distinct
 array_max
 array_min
+array_slice
 ascii
 asin
 assert_true
@@ -549,6 +550,7 @@ PREHOOK: query: SHOW FUNCTIONS LIKE '%e'
 PREHOOK: type: SHOWFUNCTIONS
 POSTHOOK: query: SHOW FUNCTIONS LIKE '%e'
 POSTHOOK: type: SHOWFUNCTIONS
+array_slice
 assert_true
 case
 coalesce
@@ -668,6 +670,7 @@ array_contains
 array_distinct
 array_max
 array_min
+array_slice
 ascii
 asin
 assert_true
diff --git a/ql/src/test/results/clientpositive/llap/udf_array_slice.q.out b/ql/src/test/results/clientpositive/llap/udf_array_slice.q.out
new file mode 100644
index 00000000000..3fc50e2e3fc
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/udf_array_slice.q.out
@@ -0,0 +1,112 @@
+PREHOOK: query: DESCRIBE FUNCTION array_slice
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION array_slice
+POSTHOOK: type: DESCFUNCTION
+array_slice(array, start, length) - Returns the subset or range of elements from an array (subarray).
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED array_slice
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED array_slice
+POSTHOOK: type: DESCFUNCTION
+array_slice(array, start, length) - Returns the subset or range of elements from an array (subarray).
+Example:
+  > SELECT array_slice(array(1, 2, 3,4), 2,2) FROM src LIMIT 1;
+  3,4
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFArraySlice
+Function type:BUILTIN
+PREHOOK: query: SELECT array_slice(array(1, 2, 3, null,3,4),2,2) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_slice(array(1, 2, 3, null,3,4),2,2) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[3,null]
+PREHOOK: query: SELECT array_slice(array(),1,1) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_slice(array(),1,1) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[]
+PREHOOK: query: SELECT array_slice(array(null),1,1) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_slice(array(null),1,1) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[]
+PREHOOK: query: SELECT array_slice(array(1.12, 2.23, 3.34, null,1.11,1.12,2.9),3,2) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_slice(array(1.12, 2.23, 3.34, null,1.11,1.12,2.9),3,2) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[null,1.11]
+PREHOOK: query: SELECT array_slice(array(1.1234567890, 2.234567890, 3.34567890, null, 3.3456789, 2.234567,1.1234567890),3,3) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_slice(array(1.1234567890, 2.234567890, 3.34567890, null, 3.3456789, 2.234567,1.1234567890),3,3) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[null,3.3456789,2.234567]
+PREHOOK: query: SELECT array_slice(array(11234567890, 2234567890, 334567890, null, 11234567890, 2234567890, 334567890, null),2,1) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_slice(array(11234567890, 2234567890, 334567890, null, 11234567890, 2234567890, 334567890, null),2,1) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[334567890]
+PREHOOK: query: SELECT array_slice(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")),2,2) FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT array_slice(array(array("a","b","c","d"),array("a","b","c","d"),array("a","b","c","d","e"),null,array("e","a","b","c","d")),2,2) FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+[["a","b","c","d","e"],null]
+PREHOOK: query: create external table test_null_array (id int, value Array<String>) ROW FORMAT DELIMITED
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_null_array
+POSTHOOK: query: create external table test_null_array (id int, value Array<String>) ROW FORMAT DELIMITED
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+#### A masked pattern was here ####
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_null_array
+PREHOOK: query: select value from test_null_array
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+POSTHOOK: query: select value from test_null_array
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+["NULL"]
+["null","null"]
+[]
+PREHOOK: query: select array_slice(value,1,1) from test_null_array
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+POSTHOOK: query: select array_slice(value,1,1) from test_null_array
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_null_array
+#### A masked pattern was here ####
+["null"]
+[]
+[]