Posted to commits@hive.apache.org by zs...@apache.org on 2009/11/03 09:53:19 UTC

svn commit: r832345 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpo...

Author: zshao
Date: Tue Nov  3 08:53:18 2009
New Revision: 832345

URL: http://svn.apache.org/viewvc?rev=832345&view=rev
Log:
HIVE-554. Add GenericUDF to create arrays, maps. (Paul Yang via zshao)

Added:
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_array.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_map.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_array.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_map.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out
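
In short, this patch registers two new constructors, array(...) and map(...), that build complex values directly in HiveQL. A minimal sketch of their use, mirroring the new test queries below (src is the standard test table; the indexing results match the new .q.out files):

    SELECT array(1, 2, 3)[2],               -- 3
           map(1, "a", 2, "b", 3, "c")[2]   -- "b"
    FROM src LIMIT 1;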

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=832345&r1=832344&r2=832345&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Tue Nov  3 08:53:18 2009
@@ -73,6 +73,8 @@
 
     HIVE-873. Better error messages for Hive cmdline. (Paul Yang via zshao)
 
+    HIVE-554. Add GenericUDF to create arrays, maps. (Paul Yang via zshao)
+
   IMPROVEMENTS
 
     HIVE-760. Add version info to META-INF/MANIFEST.MF.

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=832345&r1=832344&r2=832345&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Tue Nov  3 08:53:18 2009
@@ -166,7 +166,7 @@
     registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);
 
     registerGenericUDF("if", GenericUDFIf.class);
-
+    
     // Aliases for Java Class Names
     // These are used in getImplicitConvertUDFMethod
     registerUDF(Constants.BOOLEAN_TYPE_NAME, UDFToBoolean.class, false,
@@ -203,6 +203,9 @@
     registerUDAF("min", UDAFMin.class);
     
     // Generic UDFs
+    registerGenericUDF("array", GenericUDFArray.class);
+    registerGenericUDF("map", GenericUDFMap.class);
+
     registerGenericUDF("case", GenericUDFCase.class);
     registerGenericUDF("when", GenericUDFWhen.class);
     registerGenericUDF("hash", GenericUDFHash.class);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=832345&r1=832344&r2=832345&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Tue Nov  3 08:53:18 2009
@@ -1015,7 +1015,7 @@
 @init { msgs.push("function name"); }
 @after { msgs.pop(); }
     : // Keyword IF is also a function name
-    Identifier | KW_IF
+    Identifier | KW_IF | KW_ARRAY | KW_MAP
     ;
 
 castExpression
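
The grammar change above lets the ARRAY and MAP keywords, which are otherwise reserved for type names, appear in function-call position. A rough illustration of both uses (the table and column names here are only illustrative):

    CREATE TABLE array_map_demo (ids ARRAY<INT>, tags MAP<INT, STRING>);  -- keywords as type names
    SELECT array(1, 2, 3), map(1, "a") FROM src LIMIT 1;                  -- the same keywords as constructors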

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java?rev=832345&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFArray.java Tue Nov  3 08:53:18 2009
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.ArrayList;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.description;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+
+@description(
+    name = "array",
+    value = "_FUNC_(n0, n1...) - Creates an array with the given elements "
+)
+
+public class GenericUDFArray extends GenericUDF {
+  
+  Converter[] converters;
+  ArrayList<Object> ret = new ArrayList<Object>();
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+  throws UDFArgumentException {
+
+    GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
+
+    returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true);
+
+    for(int i=0; i<arguments.length; i++) {
+      if(!returnOIResolver.update(arguments[i])) {
+        throw new UDFArgumentTypeException(i, "Argument type \"" + arguments[i].getTypeName() + 
+            "\" is different from preceding arguments. " + 
+            "Previous type was \"" + arguments[i-1].getTypeName() + "\"");
+      }
+    }
+    
+    converters = new Converter[arguments.length];
+
+    ObjectInspector returnOI = returnOIResolver.get();
+    if(returnOI == null) {
+      returnOI = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING);
+    }
+    for(int i=0; i<arguments.length; i++) {
+      converters[i] = ObjectInspectorConverters.getConverter(arguments[i], returnOI);
+    }
+    
+    return ObjectInspectorFactory.getStandardListObjectInspector(
+        returnOI);
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    ret.clear();
+    for(int i=0; i<arguments.length; i++) {
+      ret.add(converters[i].convert(arguments[i].get()));
+    }
+    return ret;
+
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("array(");
+    for(int i=0; i<children.length; i++) {
+      sb.append(children[i]);
+      if(i+1 != children.length) {
+        sb.append(",");
+      }
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+}
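
As the resolver logic above suggests, array() accepts arguments of mixed types and converts them to a single common element type; the new udf_array.q.out below shows the resulting behavior. A small HiveQL sketch:

    SELECT array(1, 2, 3),        -- [1,2,3]            (array<int>)
           array(1, "a", 2, 3),   -- ["1","a","2","3"]  (int and string resolve to array<string>)
           array()[1]             -- NULL               (empty array, out-of-range index)
    FROM src LIMIT 1;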

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java?rev=832345&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMap.java Tue Nov  3 08:53:18 2009
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.HashMap;
+
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.exec.description;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+@description(
+    name = "map",
+    value = "_FUNC_(key0, value0, key1, value1...) - Creates a map with the given key/value pairs "
+)
+
+public class GenericUDFMap extends GenericUDF {
+  Converter[] converters;
+  HashMap<Object, Object> ret = new HashMap<Object, Object>();
+  
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments)
+  throws UDFArgumentException {
+    
+    if (arguments.length % 2 != 0) {
+      throw new UDFArgumentLengthException(
+          "Arguments must be in key/value pairs");
+    }
+
+    GenericUDFUtils.ReturnObjectInspectorResolver keyOIResolver = 
+      new GenericUDFUtils.ReturnObjectInspectorResolver(true);
+    GenericUDFUtils.ReturnObjectInspectorResolver valueOIResolver = 
+      new GenericUDFUtils.ReturnObjectInspectorResolver(true);
+
+    for(int i=0; i<arguments.length; i++) {
+      if(i%2==0) {
+        // Keys
+        if (!(arguments[i] instanceof PrimitiveObjectInspector)) {
+          throw new UDFArgumentTypeException(i,
+              "Primitive Type is expected but \"" + arguments[i].getTypeName()
+              + "\" is found");
+        }
+        if(!keyOIResolver.update(arguments[i])) {
+          throw new UDFArgumentTypeException(i, "Key type \"" + arguments[i].getTypeName() + 
+              "\" is different from preceding key types. " + 
+              "Previous key type was \"" + arguments[i-2].getTypeName() + "\"");
+        }
+      } else {
+        // Values
+        if(!valueOIResolver.update(arguments[i])) {
+          throw new UDFArgumentTypeException(i, "Value type \"" + arguments[i].getTypeName() + 
+              "\" is different from preceding value types. " + 
+              "Previous value type was \"" + arguments[i-2].getTypeName() + "\"");
+        }
+      }
+    }
+    
+    ObjectInspector keyOI = keyOIResolver.get();
+    ObjectInspector valueOI = valueOIResolver.get();
+    
+    if(keyOI == null) {
+      keyOI = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING);
+    }
+    if(valueOI == null) {
+      valueOI = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING);
+    }
+    
+    converters = new Converter[arguments.length];
+    
+    for(int i=0; i<arguments.length; i++) {
+      converters[i] = ObjectInspectorConverters.getConverter(arguments[i], 
+        i%2==0 ? keyOI : valueOI);
+    }
+    
+    return ObjectInspectorFactory.getStandardMapObjectInspector(
+        keyOI, valueOI);
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    ret.clear();    
+    for(int i=0; i<arguments.length; i+=2) {
+      ret.put(converters[i].convert(arguments[i].get()),
+          converters[i+1].convert(arguments[i+1].get()));
+    }
+    return ret;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("map(");
+    assert(children.length%2 == 0);
+    for(int i=0; i<children.length; i+=2) {
+      sb.append(children[i]);
+      sb.append(":");
+      sb.append(children[i+1]);
+      if(i+2 != children.length) {
+        sb.append(",");
+      }
+    }
+    sb.append(")");
+    return sb.toString();
+  }
+}
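
map() takes its arguments as alternating keys and values; keys and values are each resolved to a common type, as the new udf_map.q.out below shows. A small HiveQL sketch:

    SELECT map(1, "a", 2, "b", 3, "c")[2],   -- "b"  (map<int,string>)
           map(1, 2, "a", "b")["a"],         -- "b"  (keys 1 and "a" resolve to string, giving map<string,string>)
           map(1, array("a"))[1][0]          -- "a"  (values may themselves be complex types)
    FROM src LIMIT 1;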

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_array.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_array.q?rev=832345&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_array.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_array.q Tue Nov  3 08:53:18 2009
@@ -0,0 +1,5 @@
+EXPLAIN SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2],
+array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1;
+
+SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2],
+array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1;

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_map.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_map.q?rev=832345&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_map.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/udf_map.q Tue Nov  3 08:53:18 2009
@@ -0,0 +1,5 @@
+EXPLAIN SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), 
+map(1, "a", 2, "b", 3, "c")[2],  map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1;
+
+SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), 
+map(1, "a", 2, "b", 3, "c")[2],  map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1;

Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out?rev=832345&r1=832344&r2=832345&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out Tue Nov  3 08:53:18 2009
@@ -21,6 +21,7 @@
 abs
 acos
 and
+array
 ascii
 asin
 avg
@@ -70,6 +71,7 @@
 lower
 lpad
 ltrim
+map
 max
 min
 minute

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_array.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_array.q.out?rev=832345&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_array.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_array.q.out Tue Nov  3 08:53:18 2009
@@ -0,0 +1,61 @@
+PREHOOK: query: EXPLAIN SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2],
+array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2],
+array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION array)) (TOK_SELEXPR ([ (TOK_FUNCTION array) 1)) (TOK_SELEXPR (TOK_FUNCTION array 1 2 3)) (TOK_SELEXPR ([ (TOK_FUNCTION array 1 2 3) 2)) (TOK_SELEXPR (TOK_FUNCTION array 1 "a" 2 3)) (TOK_SELEXPR ([ (TOK_FUNCTION array 1 "a" 2 3) 2)) (TOK_SELEXPR ([ ([ (TOK_FUNCTION array (TOK_FUNCTION array 1) (TOK_FUNCTION array 2) (TOK_FUNCTION array 3) (TOK_FUNCTION array 4)) 1) 0))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: array()
+                    type: array<string>
+                    expr: array()[1]
+                    type: string
+                    expr: array(1,2,3)
+                    type: array<int>
+                    expr: array(1,2,3)[2]
+                    type: int
+                    expr: array(1,'a',2,3)
+                    type: array<string>
+                    expr: array(1,'a',2,3)[2]
+                    type: string
+                    expr: array(array(1),array(2),array(3),array(4))[1][0]
+                    type: int
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2],
+array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/pyang/trunk-HIVE-554/VENDOR.hive/trunk/build/ql/tmp/1854297096/10000
+POSTHOOK: query: SELECT array(), array()[1], array(1, 2, 3), array(1, 2, 3)[2], array(1,"a", 2, 3), array(1,"a", 2, 3)[2],
+array(array(1), array(2), array(3), array(4))[1][0] FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/pyang/trunk-HIVE-554/VENDOR.hive/trunk/build/ql/tmp/1854297096/10000
+[]	NULL	[1,2,3]	3	["1","a","2","3"]	2	2

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_map.q.out?rev=832345&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_map.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/udf_map.q.out Tue Nov  3 08:53:18 2009
@@ -0,0 +1,59 @@
+PREHOOK: query: EXPLAIN SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), 
+map(1, "a", 2, "b", 3, "c")[2],  map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), 
+map(1, "a", 2, "b", 3, "c")[2],  map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION map)) (TOK_SELEXPR (TOK_FUNCTION map 1 "a" 2 "b" 3 "c")) (TOK_SELEXPR (TOK_FUNCTION map 1 2 "a" "b")) (TOK_SELEXPR ([ (TOK_FUNCTION map 1 "a" 2 "b" 3 "c") 2)) (TOK_SELEXPR ([ (TOK_FUNCTION map 1 2 "a" "b") "a")) (TOK_SELEXPR ([ ([ (TOK_FUNCTION map 1 (TOK_FUNCTION array "a")) 1) 0))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: map()
+                    type: map<string,string>
+                    expr: map(1:'a',2:'b',3:'c')
+                    type: map<int,string>
+                    expr: map(1:2,'a':'b')
+                    type: map<string,string>
+                    expr: map(1:'a',2:'b',3:'c')[2]
+                    type: string
+                    expr: map(1:2,'a':'b')['a']
+                    type: string
+                    expr: map(1:array('a'))[1][0]
+                    type: string
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), 
+map(1, "a", 2, "b", 3, "c")[2],  map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: file:/data/users/pyang/trunk-HIVE-554/VENDOR.hive/trunk/build/ql/tmp/802535376/10000
+POSTHOOK: query: SELECT map(), map(1, "a", 2, "b", 3, "c"), map(1, 2, "a", "b"), 
+map(1, "a", 2, "b", 3, "c")[2],  map(1, 2, "a", "b")["a"], map(1, array("a"))[1][0] FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: file:/data/users/pyang/trunk-HIVE-554/VENDOR.hive/trunk/build/ql/tmp/802535376/10000
+{}	{1:"a",2:"b",3:"c"}	{"1":"2","a":"b"}	b	b	a