You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2011/02/01 03:05:43 UTC

svn commit: r1065890 - in /hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/test/org/apache/hadoop/hive/ql/metadata/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

Author: namit
Date: Tue Feb  1 02:05:42 2011
New Revision: 1065890

URL: http://svn.apache.org/viewvc?rev=1065890&view=rev
Log:
HIVE-1936 hive.semantic.analyzer.hook cannot have multiple values
(Siying Dong via namit)


Added:
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
    hive/trunk/ql/src/test/queries/clientpositive/multi_sahooks.q
    hive/trunk/ql/src/test/results/clientpositive/multi_sahooks.q.out
Modified:
    hive/trunk/CHANGES.txt
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java

Modified: hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hive/trunk/CHANGES.txt?rev=1065890&r1=1065889&r2=1065890&view=diff
==============================================================================
--- hive/trunk/CHANGES.txt (original)
+++ hive/trunk/CHANGES.txt Tue Feb  1 02:05:42 2011
@@ -166,6 +166,9 @@ Trunk -  Unreleased
     HIVE-1935 set hive.security.authorization.createtable.owner.grants to null
     by default (He Yongqiang via namit)
 
+   HIVE-1936 hive.semantic.analyzer.hook cannot have multiple values
+   (Siying Dong via namit)
+
   IMPROVEMENTS
 
     HIVE-1931 Improve the implementation of the METASTORE_CACHE_PINOBJTYPES config (Mac Yang via cws)

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1065890&r1=1065889&r2=1065890&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Tue Feb  1 02:05:42 2011
@@ -40,7 +40,6 @@ import org.apache.hadoop.fs.FSDataInputS
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
@@ -68,8 +67,8 @@ import org.apache.hadoop.hive.ql.lockmgr
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
-import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
 import org.apache.hadoop.hive.ql.lockmgr.LockException;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.DummyPartition;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -92,8 +91,8 @@ import org.apache.hadoop.hive.ql.parse.P
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzerFactory;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -105,7 +104,6 @@ import org.apache.hadoop.mapred.ClusterS
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.hive.ql.metadata.Hive;
 
 public class Driver implements CommandProcessor {
 
@@ -330,16 +328,19 @@ public class Driver implements CommandPr
       tree = ParseUtils.findRootNonNullToken(tree);
 
       BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
-      String hookName = HiveConf.getVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK);
+      List<AbstractSemanticAnalyzerHook> saHooks = getSemanticAnalyzerHooks();
 
       // Do semantic analysis and plan generation
-      if (hookName != null) {
-        AbstractSemanticAnalyzerHook hook = HiveUtils.getSemanticAnalyzerHook(conf, hookName);
+      if (saHooks != null) {
         HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
         hookCtx.setConf(conf);
-        hook.preAnalyze(hookCtx, tree);
+        for (AbstractSemanticAnalyzerHook hook : saHooks) {
+          tree = hook.preAnalyze(hookCtx, tree);
+        }
         sem.analyze(tree, ctx);
-        hook.postAnalyze(hookCtx, sem.getRootTasks());
+        for (AbstractSemanticAnalyzerHook hook : saHooks) {
+          hook.postAnalyze(hookCtx, sem.getRootTasks());
+        }
       } else {
         sem.analyze(tree, ctx);
       }
@@ -384,7 +385,7 @@ public class Driver implements CommandPr
       if (plan.getFetchTask() != null) {
         plan.getFetchTask().initialize(conf, plan, null);
       }
-      
+
       //do the authorization check
       if (HiveConf.getBoolVar(conf,
           HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
@@ -751,6 +752,33 @@ public class Driver implements CommandPr
     return new CommandProcessorResponse(ret);
   }
 
+  private List<AbstractSemanticAnalyzerHook> getSemanticAnalyzerHooks() throws Exception {
+    ArrayList<AbstractSemanticAnalyzerHook> saHooks = new ArrayList<AbstractSemanticAnalyzerHook>();
+    String pestr = conf.getVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK);
+    if(pestr == null) {
+      return saHooks;
+    }
+    pestr = pestr.trim();
+    if (pestr.equals("")) {
+      return saHooks;
+    }
+
+    String[] peClasses = pestr.split(",");
+
+    for (String peClass : peClasses) {
+      try {
+        AbstractSemanticAnalyzerHook hook = HiveUtils.getSemanticAnalyzerHook(conf, peClass);
+        saHooks.add(hook);
+      } catch (HiveException e) {
+        console.printError("Pre Exec Hook Class not found:" + e.getMessage());
+        throw e;
+      }
+    }
+
+    return saHooks;
+  }
+
+
   private List<Hook> getPreExecHooks() throws Exception {
     ArrayList<Hook> pehooks = new ArrayList<Hook>();
     String pestr = conf.getVar(HiveConf.ConfVars.PREEXECHOOKS);

Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java?rev=1065890&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook1.java Tue Feb  1 02:05:42 2011
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.exec.DDLTask;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+
+public class DummySemanticAnalyzerHook1 extends AbstractSemanticAnalyzerHook {
+  static int count = 0;
+  int myCount = -1;
+  boolean isCreateTable;
+
+  @Override
+  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+      throws SemanticException {
+    LogHelper console = SessionState.getConsole();
+    isCreateTable = (ast.getToken().getType() == HiveParser.TOK_CREATETABLE);
+    myCount = count++;
+    if (isCreateTable) {
+      console.printError("DummySemanticAnalyzerHook1 Pre: Count " + myCount);
+    }
+    return ast;
+  }
+
+  public DummySemanticAnalyzerHook1() {
+  }
+
+  @Override
+  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+    count = 0;
+    if (!isCreateTable) {
+      return;
+    }
+
+    CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1)).getWork()
+        .getCreateTblDesc();
+    Map<String, String> tblProps = desc.getTblProps();
+    if (tblProps == null) {
+      tblProps = new HashMap<String, String>();
+    }
+    tblProps.put("createdBy", DummyCreateTableHook.class.getName());
+    tblProps.put("Message", "Hive rocks!! Count: " + myCount);
+
+    LogHelper console = SessionState.getConsole();
+    console.printError("DummySemanticAnalyzerHook1 Post: Hive rocks!! Count: " + myCount);
+  }
+}

Added: hive/trunk/ql/src/test/queries/clientpositive/multi_sahooks.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/multi_sahooks.q?rev=1065890&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/multi_sahooks.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/multi_sahooks.q Tue Feb  1 02:05:42 2011
@@ -0,0 +1,30 @@
+set hive.semantic.analyzer.hook=org.apache.hadoop.hive.ql.metadata.DummySemanticAnalyzerHook1;
+
+drop table tbl_sahook;
+create table tbl_sahook (c string);
+desc extended tbl_sahook;
+drop table tbl_sahook;
+
+set hive.semantic.analyzer.hook=org.apache.hadoop.hive.ql.metadata.DummySemanticAnalyzerHook1,org.apache.hadoop.hive.ql.metadata.DummySemanticAnalyzerHook;
+
+drop table tbl_sahooks;
+create table tbl_sahooks (c string);
+desc extended tbl_sahooks;
+drop table tbl_sahooks;
+
+set hive.semantic.analyzer.hook=org.apache.hadoop.hive.ql.metadata.DummySemanticAnalyzerHook,org.apache.hadoop.hive.ql.metadata.DummySemanticAnalyzerHook1;
+
+drop table tbl_sahooks;
+create table tbl_sahooks (c string);
+desc extended tbl_sahooks;
+drop table tbl_sahooks;
+
+set hive.semantic.analyzer.hook=org.apache.hadoop.hive.ql.metadata.DummySemanticAnalyzerHook1,org.apache.hadoop.hive.ql.metadata.DummySemanticAnalyzerHook1;
+
+drop table tbl_sahooks;
+create table tbl_sahooks (c string);
+desc extended tbl_sahooks;
+
+set hive.semantic.analyzer.hook=;
+drop table tbl_sahooks;
+

Added: hive/trunk/ql/src/test/results/clientpositive/multi_sahooks.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/multi_sahooks.q.out?rev=1065890&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/multi_sahooks.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/multi_sahooks.q.out Tue Feb  1 02:05:42 2011
@@ -0,0 +1,106 @@
+PREHOOK: query: drop table tbl_sahook
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tbl_sahook
+POSTHOOK: type: DROPTABLE
+DummySemanticAnalyzerHook1 Pre: Count 0
+DummySemanticAnalyzerHook1 Post: Hive rocks!! Count: 0
+PREHOOK: query: create table tbl_sahook (c string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tbl_sahook (c string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tbl_sahook
+PREHOOK: query: desc extended tbl_sahook
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended tbl_sahook
+POSTHOOK: type: DESCTABLE
+c	string	
+	 	 
+Detailed Table Information	Table(tableName:tbl_sahook, dbName:default, owner:sdong, createTime:1296268373, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:c, type:string, comment:null)], location:pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/tbl_sahook, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{createdBy=org.apache.hadoop.hive.ql.metadata.DummyCreateTableHook, Message=Hive rocks!! Count: 0, transient_lastDdlTime=1296268373}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: drop table tbl_sahook
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tbl_sahook
+PREHOOK: Output: default@tbl_sahook
+POSTHOOK: query: drop table tbl_sahook
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tbl_sahook
+POSTHOOK: Output: default@tbl_sahook
+PREHOOK: query: drop table tbl_sahooks
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tbl_sahooks
+POSTHOOK: type: DROPTABLE
+DummySemanticAnalyzerHook1 Pre: Count 0
+DummySemanticAnalyzerHook1 Post: Hive rocks!! Count: 0
+PREHOOK: query: create table tbl_sahooks (c string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tbl_sahooks (c string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tbl_sahooks
+PREHOOK: query: desc extended tbl_sahooks
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended tbl_sahooks
+POSTHOOK: type: DESCTABLE
+c	string	
+	 	 
+Detailed Table Information	Table(tableName:tbl_sahooks, dbName:default, owner:sdong, createTime:1296268374, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:c, type:string, comment:null)], location:pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/tbl_sahooks, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{createdBy=org.apache.hadoop.hive.ql.metadata.DummyCreateTableHook, Message=Open Source rocks!!, transient_lastDdlTime=1296268374}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: drop table tbl_sahooks
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tbl_sahooks
+PREHOOK: Output: default@tbl_sahooks
+POSTHOOK: query: drop table tbl_sahooks
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tbl_sahooks
+POSTHOOK: Output: default@tbl_sahooks
+PREHOOK: query: drop table tbl_sahooks
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tbl_sahooks
+POSTHOOK: type: DROPTABLE
+DummySemanticAnalyzerHook1 Pre: Count 0
+DummySemanticAnalyzerHook1 Post: Hive rocks!! Count: 0
+PREHOOK: query: create table tbl_sahooks (c string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tbl_sahooks (c string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tbl_sahooks
+PREHOOK: query: desc extended tbl_sahooks
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended tbl_sahooks
+POSTHOOK: type: DESCTABLE
+c	string	
+	 	 
+Detailed Table Information	Table(tableName:tbl_sahooks, dbName:default, owner:sdong, createTime:1296268375, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:c, type:string, comment:null)], location:pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/tbl_sahooks, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{createdBy=org.apache.hadoop.hive.ql.metadata.DummyCreateTableHook, Message=Hive rocks!! Count: 0, transient_lastDdlTime=1296268375}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: drop table tbl_sahooks
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tbl_sahooks
+PREHOOK: Output: default@tbl_sahooks
+POSTHOOK: query: drop table tbl_sahooks
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tbl_sahooks
+POSTHOOK: Output: default@tbl_sahooks
+PREHOOK: query: drop table tbl_sahooks
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tbl_sahooks
+POSTHOOK: type: DROPTABLE
+DummySemanticAnalyzerHook1 Pre: Count 0
+DummySemanticAnalyzerHook1 Pre: Count 1
+DummySemanticAnalyzerHook1 Post: Hive rocks!! Count: 0
+DummySemanticAnalyzerHook1 Post: Hive rocks!! Count: 1
+PREHOOK: query: create table tbl_sahooks (c string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tbl_sahooks (c string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tbl_sahooks
+PREHOOK: query: desc extended tbl_sahooks
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended tbl_sahooks
+POSTHOOK: type: DESCTABLE
+c	string	
+	 	 
+Detailed Table Information	Table(tableName:tbl_sahooks, dbName:default, owner:sdong, createTime:1296268375, lastAccessTime:0, retention:0, sd:StorageDescriptor(cols:[FieldSchema(name:c, type:string, comment:null)], location:pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/tbl_sahooks, inputFormat:org.apache.hadoop.mapred.TextInputFormat, outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, parameters:{serialization.format=1}), bucketCols:[], sortCols:[], parameters:{}), partitionKeys:[], parameters:{createdBy=org.apache.hadoop.hive.ql.metadata.DummyCreateTableHook, Message=Hive rocks!! Count: 1, transient_lastDdlTime=1296268375}, viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)	
+PREHOOK: query: drop table tbl_sahooks
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tbl_sahooks
+PREHOOK: Output: default@tbl_sahooks
+POSTHOOK: query: drop table tbl_sahooks
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tbl_sahooks
+POSTHOOK: Output: default@tbl_sahooks