You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ec...@apache.org on 2013/06/15 02:59:05 UTC

svn commit: r1493292 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/ java/org/apache/hadoop/hive/ql/udf/generic/ test/org/apache/hadoop/hive/ql/parse/ test/org/apa...

Author: ecapriolo
Date: Sat Jun 15 00:59:04 2013
New Revision: 1493292

URL: http://svn.apache.org/r1493292
Log:


Submitted by: Brock Noland, Jonathon Chang	
Reviewed by: Edward Capriolo	
Approved by: Edward Capriolo	

Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java
    hive/trunk/ql/src/test/queries/clientnegative/macro_unused_parameter.q
    hive/trunk/ql/src/test/queries/clientpositive/macro.q
    hive/trunk/ql/src/test/queries/negative/macro_reserved_word.q
    hive/trunk/ql/src/test/results/clientnegative/macro_unused_parameter.q.out
    hive/trunk/ql/src/test/results/clientpositive/macro.q.out
    hive/trunk/ql/src/test/results/compiler/errors/macro_reserved_word.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java Sat Jun 15 00:59:04 2013
@@ -90,6 +90,13 @@ public abstract class ExprNodeEvaluator<
   public boolean isDeterministic() {
     return true;
   }
+  
+  /**
+   * Return whether this node (or any of its child nodes) is stateful.
+   */
+  public boolean isStateful() {
+    return false;
+  }
 
   /**
    * Return child evaluators if exist

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java Sat Jun 15 00:59:04 2013
@@ -142,6 +142,17 @@ public class ExprNodeGenericFuncEvaluato
   }
 
   @Override
+  public boolean isStateful() {
+    boolean result = FunctionRegistry.isStateful(genericUDF);
+    for (ExprNodeEvaluator child : children) {
+      if(result = result || child.isStateful()) {
+        return result;
+      }
+    }
+    return result;
+  }
+
+  @Override
   protected Object _evaluate(Object row, int version) throws HiveException {
     rowObject = row;
     if (ObjectInspectorUtils.isConstantObjectInspector(outputOI) &&

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Sat Jun 15 00:59:04 2013
@@ -1105,6 +1105,10 @@ public final class FunctionRegistry {
       GenericUDFBridge bridge = (GenericUDFBridge) genericUDF;
       return new GenericUDFBridge(bridge.getUdfName(), bridge.isOperator(),
           bridge.getUdfClass());
+    } else if (genericUDF instanceof GenericUDFMacro) {
+      GenericUDFMacro bridge = (GenericUDFMacro) genericUDF;
+      return new GenericUDFMacro(bridge.getMacroName(), bridge.getBody(),
+          bridge.getColNames(), bridge.getColTypes());
     }
 
     return (GenericUDF) ReflectionUtils
@@ -1173,6 +1177,11 @@ public final class FunctionRegistry {
       }
     }
 
+    if (genericUDF instanceof GenericUDFMacro) {
+      GenericUDFMacro macro = (GenericUDFMacro) (genericUDF);
+      return macro.isDeterministic();
+    }
+
     return true;
   }
 
@@ -1193,6 +1202,11 @@ public final class FunctionRegistry {
       }
     }
 
+    if (genericUDF instanceof GenericUDFMacro) {
+      GenericUDFMacro macro = (GenericUDFMacro) (genericUDF);
+      return macro.isStateful();
+    }
+
     return false;
   }
 
@@ -1303,6 +1317,27 @@ public final class FunctionRegistry {
   }
 
   /**
+   * Registers a temporary macro under the given name. The macro evaluates
+   * the given expression body over the named, typed arguments.
+   *
+   * @param macroName name under which to register the macro
+   *
+   * @param body the expression which the macro evaluates to
+   *
+   * @param colNames the names of the arguments to the macro
+   *
+   * @param colTypes the types of the arguments to the macro
+   */
+  public static void registerTemporaryMacro(
+    String macroName, ExprNodeDesc body,
+    List<String> colNames, List<TypeInfo> colTypes) {
+
+    FunctionInfo fI = new FunctionInfo(false, macroName,
+        new GenericUDFMacro(macroName, body, colNames, colTypes));
+    mFunctions.put(macroName.toLowerCase(), fI);
+  }
+
+  /**
    * Registers Hive functions from a plugin jar, using metadata from
    * the jar's META-INF/class-info.xml.
    *

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java Sat Jun 15 00:59:04 2013
@@ -28,6 +28,8 @@ import org.apache.hadoop.hive.ql.QueryPl
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
+import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
+import org.apache.hadoop.hive.ql.plan.DropMacroDesc;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
@@ -67,6 +69,16 @@ public class FunctionTask extends Task<F
     if (dropFunctionDesc != null) {
       return dropFunction(dropFunctionDesc);
     }
+
+    CreateMacroDesc createMacroDesc = work.getCreateMacroDesc();
+    if (createMacroDesc != null) {
+      return createMacro(createMacroDesc);
+    }
+
+    DropMacroDesc dropMacroDesc = work.getDropMacroDesc();
+    if (dropMacroDesc != null) {
+      return dropMacro(dropMacroDesc);
+    }
     return 0;
   }
 
@@ -89,6 +101,26 @@ public class FunctionTask extends Task<F
     }
   }
 
+  private int createMacro(CreateMacroDesc createMacroDesc) {
+    FunctionRegistry.registerTemporaryMacro(
+      createMacroDesc.getMacroName(),
+      createMacroDesc.getBody(),
+      createMacroDesc.getColNames(),
+      createMacroDesc.getColTypes());
+    return 0;
+  }
+
+  private int dropMacro(DropMacroDesc dropMacroDesc) {
+    try {
+      FunctionRegistry.unregisterTemporaryUDF(dropMacroDesc
+          .getMacroName());
+      return 0;
+    } catch (HiveException e) {
+      LOG.info("drop macro: " + StringUtils.stringifyException(e));
+      return 1;
+    }
+  }
+
   private int dropFunction(DropFunctionDesc dropFunctionDesc) {
     try {
       FunctionRegistry.unregisterTemporaryUDF(dropFunctionDesc

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Sat Jun 15 00:59:04 2013
@@ -72,6 +72,7 @@ import org.apache.hadoop.mapred.TextInpu
  * BaseSemanticAnalyzer.
  *
  */
+@SuppressWarnings("deprecation")
 public abstract class BaseSemanticAnalyzer {
   protected final Hive db;
   protected final HiveConf conf;
@@ -551,20 +552,22 @@ public abstract class BaseSemanticAnalyz
     for (int i = 0; i < numCh; i++) {
       FieldSchema col = new FieldSchema();
       ASTNode child = (ASTNode) ast.getChild(i);
-
-      String name = child.getChild(0).getText();
-      if(lowerCase) {
-        name = name.toLowerCase();
-      }
-      // child 0 is the name of the column
-      col.setName(unescapeIdentifier(name));
-      // child 1 is the type of the column
-      ASTNode typeChild = (ASTNode) (child.getChild(1));
-      col.setType(getTypeStringFromAST(typeChild));
-
-      // child 2 is the optional comment of the column
-      if (child.getChildCount() == 3) {
-        col.setComment(unescapeSQLString(child.getChild(2).getText()));
+      Tree grandChild = child.getChild(0);
+      if(grandChild != null) {
+        String name = grandChild.getText();
+        if(lowerCase) {
+          name = name.toLowerCase();
+        }
+        // child 0 is the name of the column
+        col.setName(unescapeIdentifier(name));
+        // child 1 is the type of the column
+        ASTNode typeChild = (ASTNode) (child.getChild(1));
+        col.setType(getTypeStringFromAST(typeChild));
+
+        // child 2 is the optional comment of the column
+        if (child.getChildCount() == 3) {
+          col.setComment(unescapeSQLString(child.getChild(2).getText()));
+        }        
       }
       colList.add(col);
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Sat Jun 15 00:59:04 2013
@@ -155,6 +155,7 @@ KW_RLIKE: 'RLIKE';
 KW_REGEXP: 'REGEXP';
 KW_TEMPORARY: 'TEMPORARY';
 KW_FUNCTION: 'FUNCTION';
+KW_MACRO: 'MACRO';
 KW_EXPLAIN: 'EXPLAIN';
 KW_EXTENDED: 'EXTENDED';
 KW_FORMATTED: 'FORMATTED';

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Sat Jun 15 00:59:04 2013
@@ -205,6 +205,8 @@ TOK_STRINGLITERALSEQUENCE;
 TOK_CHARSETLITERAL;
 TOK_CREATEFUNCTION;
 TOK_DROPFUNCTION;
+TOK_CREATEMACRO;
+TOK_DROPMACRO;
 TOK_CREATEVIEW;
 TOK_DROPVIEW;
 TOK_ALTERVIEW_AS;
@@ -606,9 +608,11 @@ ddlStatement
     | createViewStatement
     | dropViewStatement
     | createFunctionStatement
+    | createMacroStatement
     | createIndexStatement
     | dropIndexStatement
     | dropFunctionStatement
+    | dropMacroStatement
     | analyzeStatement
     | lockStatement
     | unlockStatement
@@ -1386,6 +1390,21 @@ dropFunctionStatement
     -> ^(TOK_DROPFUNCTION identifier ifExists?)
     ;
 
+createMacroStatement
+@init { msgs.push("create macro statement"); }
+@after { msgs.pop(); }
+    : KW_CREATE KW_TEMPORARY KW_MACRO Identifier
+      LPAREN columnNameTypeList? RPAREN expression
+    -> ^(TOK_CREATEMACRO Identifier columnNameTypeList? expression)
+    ;
+
+dropMacroStatement
+@init { msgs.push("drop macro statement"); }
+@after { msgs.pop(); }
+    : KW_DROP KW_TEMPORARY KW_MACRO ifExists? Identifier
+    -> ^(TOK_DROPMACRO Identifier ifExists?)
+    ;
+
 createViewStatement
 @init {
     msgs.push("create view statement");

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFEXISTS;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.Stack;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.lib.Dispatcher;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
+import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
+import org.apache.hadoop.hive.ql.plan.DropMacroDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.FunctionWork;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * MacroSemanticAnalyzer.
+ *
+ */
+public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
+  private static final Log LOG = LogFactory
+      .getLog(MacroSemanticAnalyzer.class);
+
+  public MacroSemanticAnalyzer(HiveConf conf) throws SemanticException {
+    super(conf);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode ast) throws SemanticException {
+    if (ast.getToken().getType() == HiveParser.TOK_CREATEMACRO) {
+      LOG.debug("Analyzing create macro " + ast.dump());
+      analyzeCreateMacro(ast);
+    }
+    if (ast.getToken().getType() == HiveParser.TOK_DROPMACRO) {
+      LOG.debug("Analyzing drop macro " + ast.dump());
+      analyzeDropMacro(ast);
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private void analyzeCreateMacro(ASTNode ast) throws SemanticException {
+    String functionName = ast.getChild(0).getText();
+    List<FieldSchema> arguments =
+      BaseSemanticAnalyzer.getColumns((ASTNode)ast.getChild(1), true);
+    boolean isNoArgumentMacro = arguments.size() == 0;
+    RowResolver rowResolver = new RowResolver();
+    ArrayList<String> macroColNames = new ArrayList<String>(arguments.size());
+    ArrayList<TypeInfo> macroColTypes = new ArrayList<TypeInfo>(arguments.size());
+    final Set<String> actualColumnNames = new HashSet<String>();
+    if(!isNoArgumentMacro) {
+      /*
+       * Walk down expression to see which arguments are actually used.
+       */
+      Node expression = (Node) ast.getChild(2);
+      PreOrderWalker walker = new PreOrderWalker(new Dispatcher() {
+        @Override
+        public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
+            throws SemanticException {
+          if(nd instanceof ASTNode) {
+            ASTNode node = (ASTNode)nd;
+            if(node.getType() == HiveParser.TOK_TABLE_OR_COL) {
+              actualColumnNames.add(node.getChild(0).getText());
+            }
+          }
+          return null;
+        }
+      });
+      walker.startWalking(Collections.singletonList(expression), null);
+    }
+    for (FieldSchema argument : arguments) {
+      TypeInfo colType =
+          TypeInfoUtils.getTypeInfoFromTypeString(argument.getType());
+      rowResolver.put("", argument.getName(),
+          new ColumnInfo(argument.getName(), colType, "", false));
+      macroColNames.add(argument.getName());
+      macroColTypes.add(colType);
+    }
+    Set<String> expectedColumnNames = new HashSet<String>(macroColNames);
+    if(!expectedColumnNames.equals(actualColumnNames)) {
+      throw new SemanticException("Expected columns " + expectedColumnNames + " but found "
+          + actualColumnNames);
+    }
+    if(expectedColumnNames.size() != macroColNames.size()) {
+      throw new SemanticException("At least one parameter name was used more than once "
+          + macroColNames);
+    }
+    SemanticAnalyzer sa = new SemanticAnalyzer(conf);
+    ExprNodeDesc body;
+    if(isNoArgumentMacro) {
+      body = sa.genExprNodeDesc((ASTNode)ast.getChild(1), rowResolver);
+    } else {
+        body = sa.genExprNodeDesc((ASTNode)ast.getChild(2), rowResolver);
+    }
+    CreateMacroDesc desc = new CreateMacroDesc(functionName, macroColNames, macroColTypes, body);
+    rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
+  }
+
+  @SuppressWarnings("unchecked")
+  private void analyzeDropMacro(ASTNode ast) throws SemanticException {
+    String functionName = ast.getChild(0).getText();
+    boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
+    // we want to signal an error if the function doesn't exist and we're
+    // configured not to ignore this
+    boolean throwException =
+      !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROPIGNORESNONEXISTENT);
+    if (throwException && FunctionRegistry.getFunctionInfo(functionName) == null) {
+      throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
+    }
+
+    DropMacroDesc desc = new DropMacroDesc(functionName);
+    rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sat Jun 15 00:59:04 2013
@@ -882,7 +882,7 @@ public class SemanticAnalyzer extends Ba
               ErrorMsg.ORDERBY_DISTRIBUTEBY_CONFLICT.getMsg()));
         }
         break;
-
+        
       case HiveParser.TOK_SORTBY:
      // Get the sort by aliases - these are aliased to the entries in the
         // select list

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Sat Jun 15 00:59:04 2013
@@ -70,6 +70,8 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_SHOWLOCKS, HiveOperation.SHOWLOCKS);
     commandType.put(HiveParser.TOK_CREATEFUNCTION, HiveOperation.CREATEFUNCTION);
     commandType.put(HiveParser.TOK_DROPFUNCTION, HiveOperation.DROPFUNCTION);
+    commandType.put(HiveParser.TOK_CREATEMACRO, HiveOperation.CREATEMACRO);
+    commandType.put(HiveParser.TOK_DROPMACRO, HiveOperation.DROPMACRO);
     commandType.put(HiveParser.TOK_CREATEVIEW, HiveOperation.CREATEVIEW);
     commandType.put(HiveParser.TOK_DROPVIEW, HiveOperation.DROPVIEW);
     commandType.put(HiveParser.TOK_CREATEINDEX, HiveOperation.CREATEINDEX);
@@ -223,6 +225,9 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_ANALYZE:
         return new ColumnStatsSemanticAnalyzer(conf, tree);
 
+      case HiveParser.TOK_CREATEMACRO:
+      case HiveParser.TOK_DROPMACRO:
+        return new MacroSemanticAnalyzer(conf);
       default:
         return new SemanticAnalyzer(conf);
       }

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
+/**
+ * CreateMacroDesc.
+ *
+ */
+@Explain(displayName = "Create Macro")
+public class CreateMacroDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private String macroName;
+  private List<String> colNames;
+  private List<TypeInfo> colTypes;
+  private ExprNodeDesc body;
+
+  /**
+   * For serialization only.
+   */
+  public CreateMacroDesc() {
+  }
+
+  public CreateMacroDesc(String macroName,
+                         List<String> colNames,
+                         List<TypeInfo> colTypes,
+                         ExprNodeDesc body) {
+    this.macroName = macroName;
+    this.colNames = colNames;
+    this.colTypes = colTypes;
+    this.body = body;
+  }
+
+  @Explain(displayName = "name")
+  public String getMacroName() {
+    return macroName;
+  }
+
+  public ExprNodeDesc getBody() {
+    return body;
+  }
+
+  public List<String> getColNames() {
+    return colNames;
+  }
+
+  public List<TypeInfo> getColTypes() {
+    return colTypes;
+  }
+
+}

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+/**
+ * DropMacroDesc.
+ *
+ */
+@Explain(displayName = "Drop Macro")
+public class DropMacroDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private String macroName;
+
+  /**
+   * For serialization only.
+   */
+  public DropMacroDesc() {
+  }
+
+  public DropMacroDesc(String macroName) {
+    this.macroName = macroName;
+  }
+
+  @Explain(displayName = "name")
+  public String getMacroName() {
+    return macroName;
+  }
+
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java Sat Jun 15 00:59:04 2013
@@ -28,6 +28,8 @@ public class FunctionWork implements Ser
   private static final long serialVersionUID = 1L;
   private CreateFunctionDesc createFunctionDesc;
   private DropFunctionDesc dropFunctionDesc;
+  private CreateMacroDesc createMacroDesc;
+  private DropMacroDesc dropMacroDesc;
 
   /**
    * For serialization only.
@@ -43,6 +45,14 @@ public class FunctionWork implements Ser
     this.dropFunctionDesc = dropFunctionDesc;
   }
 
+  public FunctionWork(CreateMacroDesc createMacroDesc) {
+    this.createMacroDesc = createMacroDesc;
+  }
+
+  public FunctionWork(DropMacroDesc dropMacroDesc) {
+    this.dropMacroDesc = dropMacroDesc;
+  }
+
   public CreateFunctionDesc getCreateFunctionDesc() {
     return createFunctionDesc;
   }
@@ -59,4 +69,12 @@ public class FunctionWork implements Ser
     this.dropFunctionDesc = dropFunctionDesc;
   }
 
+  public CreateMacroDesc getCreateMacroDesc() {
+    return createMacroDesc;
+  }
+
+  public DropMacroDesc getDropMacroDesc() {
+    return dropMacroDesc;
+  }
+
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1493292&r1=1493291&r2=1493292&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Sat Jun 15 00:59:04 2013
@@ -66,6 +66,8 @@ public enum HiveOperation {
   SHOWLOCKS("SHOWLOCKS", null, null),
   CREATEFUNCTION("CREATEFUNCTION", null, null),
   DROPFUNCTION("DROPFUNCTION", null, null),
+  CREATEMACRO("CREATEMACRO", null, null),
+  DROPMACRO("DROPMACRO", null, null),
   CREATEVIEW("CREATEVIEW", null, null),
   DROPVIEW("DROPVIEW", null, null),
   CREATEINDEX("CREATEINDEX", null, null),

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMacro.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,171 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
+import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * GenericUDFMacro wraps a user-defined macro expression into a GenericUDF
+ * interface.
+ */
+public class GenericUDFMacro extends GenericUDF implements Serializable {
+
+  private static final long serialVersionUID = 2829755821687181020L;
+  // Name the macro was registered under; used in error and display strings.
+  private String macroName;
+  // Expression tree that forms the macro body.
+  private ExprNodeDesc bodyDesc;
+  // Runtime evaluator built from bodyDesc in initialize(); rebuilt after deserialization.
+  private transient ExprNodeEvaluator body;
+  // Formal parameter names of the macro, parallel to colTypes.
+  private List<String> colNames;
+  // Declared types of the macro formals, parallel to colNames.
+  private List<TypeInfo> colTypes;
+  // Per-argument converters from the caller's inspectors to the declared writable types.
+  private transient ObjectInspectorConverters.Converter converters[];
+  // Reused per-row buffer holding the converted argument values.
+  private transient ArrayList<Object> evaluatedArguments;
+
+  /**
+   * Creates a macro UDF.
+   *
+   * @param macroName name the macro is registered under
+   * @param bodyDesc  expression tree of the macro body; must be non-null
+   * @param colNames  formal parameter names; must be the same size as colTypes
+   * @param colTypes  formal parameter types
+   */
+  public GenericUDFMacro(String macroName, ExprNodeDesc bodyDesc,
+                         List<String> colNames, List<TypeInfo> colTypes) {
+
+    this.macroName = macroName;
+    this.bodyDesc = bodyDesc;
+    this.colNames = colNames;
+    this.colTypes = colTypes;
+    assert(this.bodyDesc != null);
+    assert(colNames.size() == colTypes.size());
+  }
+
+  // For serialization only.
+  public GenericUDFMacro() {
+
+  }
+
+  /** Deterministic iff the macro body is; defaults to true before initialize() runs. */
+  public boolean isDeterministic() {
+    if(body != null) {
+      return body.isDeterministic();
+    }
+    return true;
+  }
+  
+  /** Stateful iff the macro body is; defaults to false before initialize() runs. */
+  public boolean isStateful() {
+    if(body != null) {
+      return body.isStateful();
+    }
+    return false;
+  }
+ 
+  // Precondition helper: fail fast with the field name when required state is missing.
+  private void checkNotNull(Object object, String msg) {
+    if(object == null) {
+      throw new NullPointerException(msg);
+    }
+  }
+  /**
+   * Validates the argument count against the macro's formals, builds converters
+   * from each caller ObjectInspector to the declared writable type, then
+   * initializes the body evaluator against a struct of the formals.
+   *
+   * @return the body evaluator's output ObjectInspector
+   * @throws UDFArgumentLengthException if the argument count does not match
+   * @throws UDFArgumentException if evaluator construction or initialization fails
+   */
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    checkNotNull(colTypes, "colTypes");
+    checkNotNull(arguments, "arguments");
+    checkNotNull(bodyDesc, "bodyDesc");
+    if(colTypes.size() != arguments.length) {
+      throw new UDFArgumentLengthException(
+          "The macro " + macroName + " accepts exactly " + colTypes.size() + " arguments.");
+    }
+    try {
+      body = ExprNodeEvaluatorFactory.get(bodyDesc);
+    } catch (HiveException ex) {
+      // Surface evaluator construction failures as argument errors to the caller.
+      throw new UDFArgumentException(ex);
+    }
+    converters = new ObjectInspectorConverters.Converter[arguments.length];
+    ArrayList<ObjectInspector> colObjectInspectors = new ArrayList<ObjectInspector>(colTypes.size());
+    for (int index = 0; index < arguments.length; ++index) {
+      // Convert each incoming argument to the standard writable form of the declared type.
+      ObjectInspector objectInspector = TypeInfoUtils.
+          getStandardWritableObjectInspectorFromTypeInfo(colTypes.get(index));
+      colObjectInspectors.add(objectInspector);
+      converters[index] =
+          ObjectInspectorConverters.getConverter(arguments[index], objectInspector);
+    }
+    evaluatedArguments = new ArrayList<Object>(arguments.length);
+    // The body sees its formals as fields of a single struct row.
+    ObjectInspector structOI = ObjectInspectorFactory
+        .getStandardStructObjectInspector(colNames, colObjectInspectors);
+    try {
+      return body.initialize(structOI);
+    } catch (HiveException ex) {
+      throw new UDFArgumentException(ex);
+    }
+  }
+
+  /** Converts each argument with the prepared converters and evaluates the macro body. */
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    evaluatedArguments.clear();
+    for (int index = 0; index < arguments.length; ++index) {
+      evaluatedArguments.add(converters[index].convert(arguments[index].get()));
+    }
+    return body.evaluate(evaluatedArguments);
+  }
+
+  /** Renders the call as {@code macroName(child1, child2, ...)} for EXPLAIN output. */
+  @Override
+  public String getDisplayString(String[] children) {
+      StringBuilder sb = new StringBuilder();
+      sb.append(macroName);
+      sb.append("(");
+      for (int i = 0; i < children.length; i++) {
+        sb.append(children[i]);
+        if (i + 1 < children.length) {
+          sb.append(", ");
+        }
+      }
+      sb.append(")");
+      return sb.toString();
+  }
+
+  // Bean-style accessors below exist to support serialization of the macro definition.
+  public void setMacroName(String macroName) {
+    this.macroName = macroName;
+  }
+  public String getMacroName() {
+    return macroName;
+  }
+
+  public void setBody(ExprNodeDesc bodyDesc) {
+    this.bodyDesc = bodyDesc;
+  }
+  public ExprNodeDesc getBody() {
+    return bodyDesc;
+  }
+
+  public void setColNames(List<String> colNames) {
+    this.colNames = colNames;
+  }
+  public List<String> getColNames() {
+    return colNames;
+  }
+
+  public void setColTypes(List<TypeInfo> colTypes) {
+    this.colTypes = colTypes;
+  }
+  public List<TypeInfo> getColTypes() {
+    return colTypes;
+  }
+}

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.Serializable;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Unit tests for {@link MacroSemanticAnalyzer}: parses and analyzes
+ * CREATE TEMPORARY MACRO and DROP TEMPORARY MACRO statements, covering
+ * valid parameter counts, IF EXISTS handling, and rejection of unknown,
+ * unused, or duplicate parameters and reserved-word macro names.
+ */
+public class TestMacroSemanticAnalyzer {
+
+  private ParseDriver parseDriver;
+  private MacroSemanticAnalyzer analyzer;
+  private HiveConf conf;
+  private Context context;
+
+  @Before
+  public void setup() throws Exception {
+    conf = new HiveConf();
+    context = new Context(conf);
+    parseDriver = new ParseDriver();
+    analyzer = new MacroSemanticAnalyzer(conf);
+  }
+
+  /** Parses a command string and returns the root AST node. */
+  private ASTNode parse(String command) throws Exception {
+    return ParseUtils.findRootNonNullToken(parseDriver.parse(command));
+  }
+  /** Analyzes the AST and executes the single resulting root task, asserting success. */
+  private void analyze(ASTNode ast) throws Exception {
+    analyzer.analyze(ast, context);
+    List<Task<? extends Serializable>> rootTasks = analyzer.getRootTasks();
+    Assert.assertEquals(1, rootTasks.size());
+    for(Task<? extends Serializable> task : rootTasks) {
+      Assert.assertEquals(0, task.executeTask());
+    }
+  }
+  @Test
+  public void testDropMacroDoesNotExist() throws Exception {
+    analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
+  }
+  @Test
+  public void testDropMacroExistsDoNotIgnoreErrors() throws Exception {
+    conf.setBoolVar(ConfVars.DROPIGNORESNONEXISTENT, false);
+    FunctionRegistry.registerGenericUDF(false, "SOME_MACRO", GenericUDFMacro.class);
+    analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
+  }
+  @Test
+  public void testDropMacro() throws Exception {
+    FunctionRegistry.registerGenericUDF(false, "SOME_MACRO", GenericUDFMacro.class);
+    analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
+  }
+  @Test(expected = SemanticException.class)
+  public void testDropMacroNonExistent() throws Exception {
+    conf.setBoolVar(ConfVars.DROPIGNORESNONEXISTENT, false);
+    analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
+  }
+  @Test
+  public void testDropMacroNonExistentWithIfExists() throws Exception {
+    analyze(parse("DROP TEMPORARY MACRO IF EXISTS SOME_MACRO"));
+  }
+  @Test
+  public void testDropMacroNonExistentWithIfExistsDoNotIgnoreNonExistent() throws Exception {
+    conf.setBoolVar(ConfVars.DROPIGNORESNONEXISTENT, false);
+    analyze(parse("DROP TEMPORARY MACRO IF EXISTS SOME_MACRO"));
+  }
+  @Test
+  public void testZeroInputParameters() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO FIXED_NUMBER() 1"));
+  }
+  @Test
+  public void testOneInputParameters() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))"));
+  }
+  @Test
+  public void testTwoInputParameters() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO DUMB_ADD (x INT, y INT) x + y"));
+  }
+  @Test
+  public void testThreeInputParameters() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO DUMB_ADD (x INT, y INT, z INT) x + y + z"));
+  }
+  @Test(expected = ParseException.class)
+  public void testCannotUseReservedWordAsName() throws Exception {
+    parse("CREATE TEMPORARY MACRO DOUBLE (x DOUBLE) 1.0 / (1.0 + EXP(-x))");
+  }
+  @Test(expected = ParseException.class)
+  public void testNoBody() throws Exception {
+    parse("CREATE TEMPORARY MACRO DUMB_MACRO()");
+  }
+  @Test(expected = SemanticException.class)
+  public void testUnknownInputParameter() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, y INT) x + y + z"));
+  }
+  @Test(expected = SemanticException.class)
+  public void testOneUnusedParameterName() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, y INT) x"));
+  }
+  @Test(expected = SemanticException.class)
+  public void testTwoUnusedParameterNames() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, y INT, z INT) x"));
+  }
+  @Test(expected = SemanticException.class)
+  public void testTwoDuplicateParameterNames() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, x INT) x + x"));
+  }
+  @Test(expected = SemanticException.class)
+  public void testThreeDuplicateParameters() throws Exception {
+    analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, x INT, x INT) x + x + x"));
+  }
+}
\ No newline at end of file

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse;
+
+import junit.framework.Assert;
+
+import org.antlr.runtime.CommonToken;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verifies that {@link SemanticAnalyzerFactory} routes the CREATE MACRO and
+ * DROP MACRO AST tokens to a {@link MacroSemanticAnalyzer}.
+ */
+public class TestSemanticAnalyzerFactory {
+
+  private HiveConf conf;
+  
+  @Before
+  public void setup() throws Exception {
+    conf = new HiveConf();
+  }
+  @Test
+  public void testCreate() throws Exception {
+    BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory.
+        get(conf, new ASTNode(new CommonToken(HiveParser.TOK_CREATEMACRO)));
+    Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof MacroSemanticAnalyzer);
+  }
+  @Test
+  public void testDrop() throws Exception {
+    BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory.
+        get(conf, new ASTNode(new CommonToken(HiveParser.TOK_DROPMACRO)));
+    Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof MacroSemanticAnalyzer);
+  }
+}
\ No newline at end of file

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.plan;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.junit.Before;
+import org.junit.Test;
+
+/** Unit test for {@link CreateMacroDesc}: the constructor must store all fields. */
+public class TestCreateMacroDesc {
+  private String name;
+  private List<String> colNames;
+  private List<TypeInfo> colTypes;
+  private ExprNodeConstantDesc bodyDesc;
+  @Before
+  public void setup() throws Exception {
+    // Two int formals (x, y) with a constant body of 1.
+    name = "fixed_number";
+    colNames = new ArrayList<String>();
+    colTypes = new ArrayList<TypeInfo>();
+    colNames.add("x");
+    colTypes.add(TypeInfoFactory.intTypeInfo);
+    colNames.add("y");
+    colTypes.add(TypeInfoFactory.intTypeInfo);
+    bodyDesc = new ExprNodeConstantDesc(1);
+  }
+  @Test
+  public void testCreateMacroDesc() throws Exception {
+    CreateMacroDesc desc = new CreateMacroDesc(name, colNames, colTypes, bodyDesc);
+    Assert.assertEquals(name, desc.getMacroName());
+    Assert.assertEquals(bodyDesc, desc.getBody());
+    Assert.assertEquals(colNames, desc.getColNames());
+    Assert.assertEquals(colTypes, desc.getColTypes());
+  }
+}

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.plan;
+
+import junit.framework.Assert;
+
+import org.junit.Before;
+import org.junit.Test;
+
+/** Unit test for {@link DropMacroDesc}: the constructor must store the macro name. */
+public class TestDropMacroDesc {
+  private String name;
+  @Before
+  public void setup() throws Exception {
+    name = "fixed_number";
+  }
+  @Test
+  public void testDropMacroDesc() throws Exception {
+    DropMacroDesc desc = new DropMacroDesc(name);
+    Assert.assertEquals(name, desc.getMacroName());
+  }
+}

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMacro.java Sat Jun 15 00:59:04 2013
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.IntWritable;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Unit tests for {@link GenericUDFMacro}: evaluates a macro whose body is a
+ * constant and checks the accessors, determinism flags, and display string;
+ * also exercises the serialization (no-arg) constructor.
+ */
+public class TestGenericUDFMacro {
+
+  private String name;
+  private GenericUDFMacro udf;
+  private List<String> colNames;
+  private List<TypeInfo> colTypes;
+  private ObjectInspector[] inspectors;
+  private DeferredObject[] arguments;
+  private IntWritable x;
+  private IntWritable y;
+  private ExprNodeConstantDesc bodyDesc;
+  private int expected;
+  @Before
+  public void setup() throws Exception {
+    // Macro with two int formals (x, y); the body is the constant x + y so the
+    // evaluated result can be compared against a known value.
+    name = "fixed_number";
+    colNames = new ArrayList<String>();
+    colTypes = new ArrayList<TypeInfo>();
+    colNames.add("x");
+    colTypes.add(TypeInfoFactory.intTypeInfo);
+    colNames.add("y");
+    colTypes.add(TypeInfoFactory.intTypeInfo);
+    x = new IntWritable(1);
+    y = new IntWritable(2);
+    expected = x.get() + y.get();
+    bodyDesc = new ExprNodeConstantDesc(expected);
+    inspectors = new ObjectInspector[] {
+        PrimitiveObjectInspectorFactory.
+          getPrimitiveWritableConstantObjectInspector(
+            PrimitiveObjectInspector.PrimitiveCategory.INT, x),
+        PrimitiveObjectInspectorFactory.
+          getPrimitiveWritableConstantObjectInspector(
+            PrimitiveObjectInspector.PrimitiveCategory.INT, y),
+    };
+    arguments = new DeferredObject[] {
+        new DeferredJavaObject(x),
+        new DeferredJavaObject(y)
+    };
+  }
+
+  @Test
+  public void testUDF() throws Exception {
+    udf = new GenericUDFMacro(name, bodyDesc, colNames, colTypes);
+    udf.initialize(inspectors);
+    Object actual = udf.evaluate(arguments);
+    Assert.assertEquals(bodyDesc.getValue(), ((IntWritable)actual).get());
+    Assert.assertTrue(udf.isDeterministic());
+    Assert.assertFalse(udf.isStateful());
+    Assert.assertEquals(name, udf.getMacroName());
+    Assert.assertEquals(bodyDesc, udf.getBody());
+    Assert.assertEquals(colNames, udf.getColNames());
+    Assert.assertEquals(colTypes, udf.getColTypes());
+    Assert.assertEquals(name + "(x, y)", udf.getDisplayString(new String[] { "x", "y"}));
+  }
+  @Test
+  public void testNoArgsConstructor() throws Exception {
+    // The serialization constructor must leave the UDF in a safe default state.
+    udf = new GenericUDFMacro();
+    Assert.assertTrue(udf.isDeterministic());
+    Assert.assertFalse(udf.isStateful());
+  }
+}

Added: hive/trunk/ql/src/test/queries/clientnegative/macro_unused_parameter.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/macro_unused_parameter.q?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/macro_unused_parameter.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/macro_unused_parameter.q Sat Jun 15 00:59:04 2013
@@ -0,0 +1 @@
+CREATE TEMPORARY MACRO BAD_MACRO (x INT, y INT) x;

Added: hive/trunk/ql/src/test/queries/clientpositive/macro.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/macro.q?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/macro.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/macro.q Sat Jun 15 00:59:04 2013
@@ -0,0 +1,26 @@
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x));
+SELECT SIGMOID(2) FROM src LIMIT 1;
+EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1;
+EXPLAIN EXTENDED SELECT SIGMOID(2) FROM src LIMIT 1;
+DROP TEMPORARY MACRO SIGMOID;
+
+CREATE TEMPORARY MACRO FIXED_NUMBER() 1;
+SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1;
+EXPLAIN SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1;
+EXPLAIN EXTENDED SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1;
+DROP TEMPORARY MACRO FIXED_NUMBER;
+
+set macrotest=1;
+CREATE TEMPORARY MACRO CONF_TEST() "${hiveconf:macrotest}";
+SELECT CONF_TEST() FROM src LIMIT 1;
+DROP TEMPORARY MACRO CONF_TEST;
+
+CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y;
+CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y;
+SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1;
+EXPLAIN SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1;
+EXPLAIN EXTENDED SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1;
+DROP TEMPORARY MACRO SIMPLE_ADD;
+DROP TEMPORARY MACRO SIMPLE_ADD;
+
+

Added: hive/trunk/ql/src/test/queries/negative/macro_reserved_word.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/negative/macro_reserved_word.q?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/negative/macro_reserved_word.q (added)
+++ hive/trunk/ql/src/test/queries/negative/macro_reserved_word.q Sat Jun 15 00:59:04 2013
@@ -0,0 +1 @@
+CREATE TEMPORARY MACRO DOUBLE (x DOUBLE) 1.0 / (1.0 + EXP(-x));

Added: hive/trunk/ql/src/test/results/clientnegative/macro_unused_parameter.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/macro_unused_parameter.q.out?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/macro_unused_parameter.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/macro_unused_parameter.q.out Sat Jun 15 00:59:04 2013
@@ -0,0 +1 @@
+FAILED: SemanticException Expected columns [y, x] but found [x]

Added: hive/trunk/ql/src/test/results/clientpositive/macro.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/macro.q.out?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/macro.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/macro.q.out Sat Jun 15 00:59:04 2013
@@ -0,0 +1,472 @@
+PREHOOK: query: CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+PREHOOK: type: CREATEMACRO
+POSTHOOK: query: CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+POSTHOOK: type: CREATEMACRO
+PREHOOK: query: SELECT SIGMOID(2) FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SIGMOID(2) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0.8807970779778823
+PREHOOK: query: EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SIGMOID(2) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIGMOID 2))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: SIGMOID(2)
+                    type: double
+              outputColumnNames: _col0
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: EXPLAIN EXTENDED SELECT SIGMOID(2) FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT SIGMOID(2) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIGMOID 2))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: SIGMOID(2)
+                    type: double
+              outputColumnNames: _col0
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns _col0
+                        columns.types double
+                        escape.delim \
+                        serialization.format 1
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: DROP TEMPORARY MACRO SIGMOID
+PREHOOK: type: DROPMACRO
+POSTHOOK: query: DROP TEMPORARY MACRO SIGMOID
+POSTHOOK: type: DROPMACRO
+PREHOOK: query: CREATE TEMPORARY MACRO FIXED_NUMBER() 1
+PREHOOK: type: CREATEMACRO
+POSTHOOK: query: CREATE TEMPORARY MACRO FIXED_NUMBER() 1
+POSTHOOK: type: CREATEMACRO
+PREHOOK: query: SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+2
+PREHOOK: query: EXPLAIN SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_FUNCTION FIXED_NUMBER) 1))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: (FIXED_NUMBER() + 1)
+                    type: int
+              outputColumnNames: _col0
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: EXPLAIN EXTENDED SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT FIXED_NUMBER() + 1 FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_FUNCTION FIXED_NUMBER) 1))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: (FIXED_NUMBER() + 1)
+                    type: int
+              outputColumnNames: _col0
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns _col0
+                        columns.types int
+                        escape.delim \
+                        serialization.format 1
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: DROP TEMPORARY MACRO FIXED_NUMBER
+PREHOOK: type: DROPMACRO
+POSTHOOK: query: DROP TEMPORARY MACRO FIXED_NUMBER
+POSTHOOK: type: DROPMACRO
+PREHOOK: query: CREATE TEMPORARY MACRO CONF_TEST() "1"
+PREHOOK: type: CREATEMACRO
+POSTHOOK: query: CREATE TEMPORARY MACRO CONF_TEST() "1"
+POSTHOOK: type: CREATEMACRO
+PREHOOK: query: SELECT CONF_TEST() FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT CONF_TEST() FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1
+PREHOOK: query: DROP TEMPORARY MACRO CONF_TEST
+PREHOOK: type: DROPMACRO
+POSTHOOK: query: DROP TEMPORARY MACRO CONF_TEST
+POSTHOOK: type: DROPMACRO
+PREHOOK: query: CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y
+PREHOOK: type: CREATEMACRO
+POSTHOOK: query: CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y
+POSTHOOK: type: CREATEMACRO
+PREHOOK: query: CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y
+PREHOOK: type: CREATEMACRO
+POSTHOOK: query: CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y
+POSTHOOK: type: CREATEMACRO
+PREHOOK: query: SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+10
+PREHOOK: query: EXPLAIN SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIMPLE_ADD 1 9))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            Select Operator
+              expressions:
+                    expr: SIMPLE_ADD(1, 9)
+                    type: int
+              outputColumnNames: _col0
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: EXPLAIN EXTENDED SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED SELECT SIMPLE_ADD(1, 9) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION SIMPLE_ADD 1 9))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        src 
+          TableScan
+            alias: src
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: SIMPLE_ADD(1, 9)
+                    type: int
+              outputColumnNames: _col0
+              Limit
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns _col0
+                        columns.types int
+                        escape.delim \
+                        serialization.format 1
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numPartitions 0
+              numRows 0
+              rawDataSize 0
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numPartitions 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+
+
+PREHOOK: query: DROP TEMPORARY MACRO SIMPLE_ADD
+PREHOOK: type: DROPMACRO
+POSTHOOK: query: DROP TEMPORARY MACRO SIMPLE_ADD
+POSTHOOK: type: DROPMACRO
+PREHOOK: query: DROP TEMPORARY MACRO SIMPLE_ADD
+PREHOOK: type: DROPMACRO
+POSTHOOK: query: DROP TEMPORARY MACRO SIMPLE_ADD
+POSTHOOK: type: DROPMACRO

Added: hive/trunk/ql/src/test/results/compiler/errors/macro_reserved_word.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/errors/macro_reserved_word.q.out?rev=1493292&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/errors/macro_reserved_word.q.out (added)
+++ hive/trunk/ql/src/test/results/compiler/errors/macro_reserved_word.q.out Sat Jun 15 00:59:04 2013
@@ -0,0 +1 @@
+Parse Error: line 1:23 mismatched input 'DOUBLE' expecting Identifier near 'MACRO' in create macro statement