You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2010/10/08 19:58:35 UTC
svn commit: r1005935 - in /hadoop/hive/trunk: ./
common/src/java/org/apache/hadoop/hive/conf/
ql/src/java/org/apache/hadoop/hive/ql/
ql/src/java/org/apache/hadoop/hive/ql/metadata/
ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/org/apache/had...
Author: namit
Date: Fri Oct 8 17:58:35 2010
New Revision: 1005935
URL: http://svn.apache.org/viewvc?rev=1005935&view=rev
Log:
HIVE-1546 Ability to plug custom Semantic Analyzers for Hive Grammar
(Ashutosh Chauhan via namit)
Added:
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
hadoop/hive/trunk/ql/src/test/queries/clientnegative/genericFileFormat.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/inoutdriver.q
hadoop/hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/inoutdriver.q.out
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=1005935&r1=1005934&r2=1005935&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Fri Oct 8 17:58:35 2010
@@ -92,6 +92,9 @@ Trunk - Unreleased
HIVE-1427. Add metastore schema migration scripts (0.5 -> 0.6)
(Carl Steinback via namit)
+ HIVE-1546 Ability to plug custom Semantic Analyzers for Hive Grammar
+ (Ashutosh Chauhan via namit)
+
IMPROVEMENTS
HIVE-1394. Do not update transient_lastDdlTime if the partition is modified by a housekeeping
Modified: hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1005935&r1=1005934&r2=1005935&view=diff
==============================================================================
--- hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hadoop/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Oct 8 17:58:35 2010
@@ -303,7 +303,9 @@ public class HiveConf extends Configurat
HIVEOUTERJOINSUPPORTSFILTERS("hive.outerjoin.supports.filters", true),
// Serde for FetchTask
- HIVEFETCHOUTPUTSERDE("hive.fetch.output.serde", "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe")
+ HIVEFETCHOUTPUTSERDE("hive.fetch.output.serde", "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe"),
+
+ SEMANTIC_ANALYZER_HOOK("hive.semantic.analyzer.hook",null),
;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1005935&r1=1005934&r2=1005935&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Fri Oct 8 17:58:35 2010
@@ -40,6 +40,7 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
@@ -63,10 +64,16 @@ import org.apache.hadoop.hive.ql.lockmgr
import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
-import org.apache.hadoop.hive.ql.io.IOPrepareCache;
+import org.apache.hadoop.hive.ql.metadata.DummyPartition;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;
import org.apache.hadoop.hive.ql.parse.ParseUtils;
@@ -83,9 +90,6 @@ import org.apache.hadoop.mapred.JobClien
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.DummyPartition;
-import org.apache.hadoop.hive.ql.metadata.Table;
public class Driver implements CommandProcessor {
@@ -322,8 +326,21 @@ public class Driver implements CommandPr
tree = ParseUtils.findRootNonNullToken(tree);
BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
+ String hookName = HiveConf.getVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK);
+
// Do semantic analysis and plan generation
- sem.analyze(tree, ctx);
+ if (hookName != null){
+ AbstractSemanticAnalyzerHook hook = HiveUtils.getSemanticAnalyzerHook(conf, hookName);
+ HiveSemanticAnalyzerHookContext hookCtx = new HiveSemanticAnalyzerHookContextImpl();
+ hookCtx.setConf(conf);
+ hook.preAnalyze(hookCtx, tree);
+ sem.analyze(tree, ctx);
+ hook.postAnalyze(hookCtx, sem.getRootTasks());
+ }
+ else{
+ sem.analyze(tree, ctx);
+ }
+
LOG.info("Semantic Analysis Completed");
// validate the plan
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java?rev=1005935&r1=1005934&r2=1005935&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java Fri Oct 8 17:58:35 2010
@@ -22,6 +22,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.util.ReflectionUtils;
/**
@@ -168,4 +169,18 @@ public final class HiveUtils {
}
}
+ public static AbstractSemanticAnalyzerHook getSemanticAnalyzerHook(
+ HiveConf conf, String hookName) throws HiveException{
+ try {
+ Class<? extends AbstractSemanticAnalyzerHook> hookClass =
+ (Class<? extends AbstractSemanticAnalyzerHook>)
+ Class.forName(hookName, true, JavaUtils.getClassLoader());
+ return (AbstractSemanticAnalyzerHook) ReflectionUtils.newInstance(
+ hookClass, conf);
+ } catch (ClassNotFoundException e) {
+ throw new HiveException("Error in loading semantic analyzer hook: "+
+ hookName +e.getMessage(),e);
+ }
+
+ }
}
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/AbstractSemanticAnalyzerHook.java Fri Oct 8 17:58:35 2010
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Task;
+
+public abstract class AbstractSemanticAnalyzerHook implements
+HiveSemanticAnalyzerHook {
+
+ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context,ASTNode ast)
+ throws SemanticException {
+ return ast;
+ }
+
+ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+ List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ }
+}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1005935&r1=1005934&r2=1005935&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Fri Oct 8 17:58:35 2010
@@ -449,10 +449,17 @@ public abstract class BaseSemanticAnalyz
return getColumns(ast, true);
}
+ protected void handleGenericFileFormat(ASTNode node) throws SemanticException{
+
+ ASTNode child = (ASTNode)node.getChild(0);
+ throw new SemanticException("Unrecognized file format in STORED AS clause:"+
+ " "+ (child == null ? "" : child.getText()));
+ }
+
/**
* Get the list of FieldSchema out of the ASTNode.
*/
- protected List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase) throws SemanticException {
+ public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase) throws SemanticException {
List<FieldSchema> colList = new ArrayList<FieldSchema>();
int numCh = ast.getChildCount();
for (int i = 0; i < numCh; i++) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1005935&r1=1005934&r2=1005935&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Oct 8 17:58:35 2010
@@ -75,17 +75,17 @@ import org.apache.hadoop.hive.ql.plan.Dr
import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.LockTableDesc;
import org.apache.hadoop.hive.ql.plan.MsckDesc;
import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
-import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
-import org.apache.hadoop.hive.ql.plan.LockTableDesc;
-import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -593,6 +593,9 @@ public class DDLSemanticAnalyzer extends
outputFormat = RCFILE_OUTPUT;
serde = COLUMNAR_SERDE;
break;
+ case HiveParser.TOK_FILEFORMAT_GENERIC:
+ handleGenericFileFormat(child);
+ break;
}
AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, inputFormat,
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1005935&r1=1005934&r2=1005935&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Fri Oct 8 17:58:35 2010
@@ -140,6 +140,7 @@ TOK_TBLSEQUENCEFILE;
TOK_TBLTEXTFILE;
TOK_TBLRCFILE;
TOK_TABLEFILEFORMAT;
+TOK_FILEFORMAT_GENERIC;
TOK_OFFLINE;
TOK_ENABLE;
TOK_DISABLE;
@@ -600,8 +601,9 @@ fileFormat
: KW_SEQUENCEFILE -> ^(TOK_TBLSEQUENCEFILE)
| KW_TEXTFILE -> ^(TOK_TBLTEXTFILE)
| KW_RCFILE -> ^(TOK_TBLRCFILE)
- | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral
- -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt)
+ | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+ -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
+ | genericSpec=Identifier -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
;
tabTypeExpr
@@ -849,11 +851,13 @@ tableFileFormat
KW_STORED KW_AS KW_SEQUENCEFILE -> TOK_TBLSEQUENCEFILE
| KW_STORED KW_AS KW_TEXTFILE -> TOK_TBLTEXTFILE
| KW_STORED KW_AS KW_RCFILE -> TOK_TBLRCFILE
- | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral
- -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt)
+ | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
+ -> ^(TOK_TABLEFILEFORMAT $inFmt $outFmt $inDriver? $outDriver?)
| KW_STORED KW_BY storageHandler=StringLiteral
(KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
-> ^(TOK_STORAGEHANDLER $storageHandler $serdeprops?)
+ | KW_STORED KW_AS genericSpec=Identifier
+ -> ^(TOK_FILEFORMAT_GENERIC $genericSpec)
;
tableLocation
@@ -1793,6 +1797,8 @@ KW_TEXTFILE: 'TEXTFILE';
KW_RCFILE: 'RCFILE';
KW_INPUTFORMAT: 'INPUTFORMAT';
KW_OUTPUTFORMAT: 'OUTPUTFORMAT';
+KW_INPUTDRIVER: 'INPUTDRIVER';
+KW_OUTPUTDRIVER: 'OUTPUTDRIVER';
KW_OFFLINE: 'OFFLINE';
KW_ENABLE: 'ENABLE';
KW_DISABLE: 'DISABLE';
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHook.java Fri Oct 8 17:58:35 2010
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Task;
+
+/**
+ * HiveSemanticAnalyzerHook allows Hive to be extended with custom
+ * logic for semantic analysis of QL statements. This interface
+ * and any Hive internals it exposes are currently
+ * "limited private and evolving" (unless otherwise stated elsewhere)
+ * and intended mainly for use by the Howl project.
+ *
+ *<p>
+ *
+ * Note that the lifetime of an instantiated hook object is scoped to
+ * the analysis of a single statement; hook instances are never reused.
+ */
+public interface HiveSemanticAnalyzerHook {
+ /**
+ * Invoked before Hive performs its own semantic analysis on
+ * a statement. The implementation may inspect the statement AST and
+ * prevent its execution by throwing a SemanticException.
+ * Optionally, it may also augment/rewrite the AST, but must produce
+ * a form equivalent to one which could have
+ * been returned directly from Hive's own parser.
+ *
+ * @param context context information for semantic analysis
+ *
+ * @param ast AST being analyzed and optionally rewritten
+ *
+ * @return replacement AST (typically the same as the original AST unless the
+ * entire tree had to be replaced; must not be null)
+ */
+ public ASTNode preAnalyze(
+ HiveSemanticAnalyzerHookContext context,
+ ASTNode ast) throws SemanticException;
+
+ /**
+ * Invoked after Hive performs its own semantic analysis on a
+ * statement (including optimization).
+ * Hive calls postAnalyze on the same hook object
+ * as preAnalyze, so the hook can maintain state across the calls.
+ *
+ * @param context context information for semantic analysis
+ * @param rootTasks root tasks produced by semantic analysis;
+ * the hook is free to modify this list or its contents
+ */
+ public void postAnalyze(
+ HiveSemanticAnalyzerHookContext context,
+ List<Task<? extends Serializable>> rootTasks) throws SemanticException;
+}
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContext.java Fri Oct 8 17:58:35 2010
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Context information provided by Hive to implementations of
+ * HiveSemanticAnalyzerHook.
+ */
+public interface HiveSemanticAnalyzerHookContext extends Configurable{
+ /**
+ * @return the Hive db instance; hook implementations can use this for
+ * purposes such as getting configuration information or making metastore calls
+ */
+ public Hive getHive() throws HiveException;
+}
Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveSemanticAnalyzerHookContextImpl.java Fri Oct 8 17:58:35 2010
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+public class HiveSemanticAnalyzerHookContextImpl implements HiveSemanticAnalyzerHookContext {
+
+ Configuration conf;
+
+ @Override
+ public Hive getHive() throws HiveException {
+
+ return Hive.get((HiveConf)conf);
+ }
+
+ @Override
+ public Configuration getConf() {
+ return conf;
+ }
+
+ @Override
+ public void setConf(Configuration conf) {
+ this.conf = conf;
+ }
+
+}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1005935&r1=1005934&r2=1005935&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Oct 8 17:58:35 2010
@@ -29,9 +29,9 @@ import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
+import java.util.Map.Entry;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
@@ -92,7 +92,6 @@ import org.apache.hadoop.hive.ql.metadat
import org.apache.hadoop.hive.ql.optimizer.GenMRFileSink1;
import org.apache.hadoop.hive.ql.optimizer.GenMROperator;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext;
-import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink1;
import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink2;
import org.apache.hadoop.hive.ql.optimizer.GenMRRedSink3;
@@ -102,6 +101,7 @@ import org.apache.hadoop.hive.ql.optimiz
import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
import org.apache.hadoop.hive.ql.optimizer.MapJoinFactory;
import org.apache.hadoop.hive.ql.optimizer.Optimizer;
+import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalContext;
import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalOptimizer;
import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
@@ -121,7 +121,6 @@ import org.apache.hadoop.hive.ql.plan.Ex
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
-import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
import org.apache.hadoop.hive.ql.plan.ForwardDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
@@ -143,12 +142,13 @@ import org.apache.hadoop.hive.ql.plan.Ta
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.ql.plan.UDTFDesc;
import org.apache.hadoop.hive.ql.plan.UnionDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
@@ -156,9 +156,9 @@ import org.apache.hadoop.hive.serde2.Ser
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -6875,6 +6875,11 @@ public class SemanticAnalyzer extends Ba
shared.serdeProps);
}
break;
+
+ case HiveParser.TOK_FILEFORMAT_GENERIC:
+ handleGenericFileFormat(child);
+ break;
+
default:
assert false;
}
Added: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java (added)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/DummySemanticAnalyzerHook.java Fri Oct 8 17:58:35 2010
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.exec.DDLTask;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+
+public class DummySemanticAnalyzerHook extends AbstractSemanticAnalyzerHook{
+
+ private AbstractSemanticAnalyzerHook hook;
+
+ @Override
+ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+ throws SemanticException {
+
+ switch (ast.getToken().getType()) {
+
+ case HiveParser.TOK_CREATETABLE:
+ hook = new DummyCreateTableHook();
+ return hook.preAnalyze(context, ast);
+
+ case HiveParser.TOK_DROPTABLE:
+ case HiveParser.TOK_DESCTABLE:
+ return ast;
+
+ default:
+ throw new SemanticException("Operation not supported.");
+ }
+ }
+
+ public DummySemanticAnalyzerHook() {
+
+ }
+
+ @Override
+ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+ List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+
+ if(hook != null) {
+ hook.postAnalyze(context, rootTasks);
+ }
+ }
+}
+
+class DummyCreateTableHook extends AbstractSemanticAnalyzerHook{
+
+ @Override
+ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+ throws SemanticException {
+
+ int numCh = ast.getChildCount();
+
+ for (int num = 1; num < numCh; num++) {
+ ASTNode child = (ASTNode) ast.getChild(num);
+
+ switch (child.getToken().getType()) {
+
+ case HiveParser.TOK_QUERY:
+ throw new SemanticException("CTAS not supported.");
+ }
+ }
+ return ast;
+ }
+
+ @Override
+ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+ List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ CreateTableDesc desc = ((DDLTask)rootTasks.get(rootTasks.size()-1)).getWork().getCreateTblDesc();
+ Map<String,String> tblProps = desc.getTblProps();
+ if(tblProps == null) {
+ tblProps = new HashMap<String, String>();
+ }
+ tblProps.put("createdBy", DummyCreateTableHook.class.getName());
+ tblProps.put("Message", "Open Source rocks!!");
+ desc.setTblProps(tblProps);
+ }
+}
Added: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java (added)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java Fri Oct 8 17:58:35 2010
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+
+/**
+ * End-to-end check that a semantic-analyzer hook named in
+ * SEMANTIC_ANALYZER_HOOK is loaded and invoked by the Driver:
+ * the dummy hook must reject CTAS during preAnalyze, and its
+ * postAnalyze must have stamped marker properties onto a table
+ * created through a plain CREATE TABLE.
+ */
+public class TestSemanticAnalyzerHookLoading extends TestCase {
+
+ public void testHookLoading() throws Exception{
+
+ HiveConf conf = new HiveConf(this.getClass());
+ conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, DummySemanticAnalyzerHook.class.getName());
+ // Concurrency (lock manager) is disabled so the test needs no
+ // ZooKeeper-backed lock infrastructure.
+ conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+ Driver driver = new Driver(conf);
+
+ // Best-effort cleanup from any previous failed run; result ignored.
+ driver.run("drop table testDL");
+ // CTAS must be rejected by DummyCreateTableHook.preAnalyze.
+ // NOTE(review): 10 is the response code the Driver returns for a
+ // semantic-analysis failure -- confirm against Driver.compile().
+ CommandProcessorResponse resp = driver.run("create table testDL (a int) as select * from tbl2");
+ assertEquals(10, resp.getResponseCode());
+ assertTrue(resp.getErrorMessage().contains("CTAS not supported."));
+
+ // Plain CREATE TABLE must succeed (response code 0, no error).
+ resp = driver.run("create table testDL (a int)");
+ assertEquals(0, resp.getResponseCode());
+ assertNull(resp.getErrorMessage());
+
+ // postAnalyze should have stamped the marker properties onto the
+ // new table's parameters in the metastore.
+ Map<String,String> params = Hive.get(conf).getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "testDL").getParameters();
+
+ assertEquals(DummyCreateTableHook.class.getName(),params.get("createdBy"));
+ assertEquals("Open Source rocks!!", params.get("Message"));
+
+ // Cleanup; note this is skipped if an assertion above fails.
+ driver.run("drop table testDL");
+}
+}
Added: hadoop/hive/trunk/ql/src/test/queries/clientnegative/genericFileFormat.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientnegative/genericFileFormat.q?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientnegative/genericFileFormat.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientnegative/genericFileFormat.q Fri Oct 8 17:58:35 2010
@@ -0,0 +1 @@
+create table testFail (a int) stored as foo;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inoutdriver.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inoutdriver.q?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inoutdriver.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inoutdriver.q Fri Oct 8 17:58:35 2010
@@ -0,0 +1,2 @@
+create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver';
+desc extended test;
Added: hadoop/hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/genericFileFormat.q.out Fri Oct 8 17:58:35 2010
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Unrecognized file format in STORED AS clause: foo
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/inoutdriver.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/inoutdriver.q.out?rev=1005935&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/inoutdriver.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/inoutdriver.q.out Fri Oct 8 17:58:35 2010
@@ -0,0 +1,36 @@
+PREHOOK: query: create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table test (a int) stored as inputformat 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' outputformat 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'RCFileInDriver' outputdriver 'RCFileOutDriver'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@test
+PREHOOK: query: desc extended test
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc extended test
+POSTHOOK: type: DESCTABLE
+col_name data_type comment
+
+a int None
+
+# Detailed Table Information
+Database: default
+Owner: chauhana
+CreateTime: Wed Sep 29 10:52:37 PDT 2010
+LastAccessTime: Wed Dec 31 16:00:00 PST 1969
+Protect Mode: None
+Retention: 0
+Location: pfile:/Users/chauhana/workspace/howl-sprint2-hooks/build/ql/test/data/warehouse/test
+Table Type: MANAGED_TABLE
+Table Parameters:
+ transient_lastDdlTime 1285782757
+
+# Storage Information
+SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+Storage Desc Params:
+ serialization.format 1
+