You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by mg...@apache.org on 2019/09/11 11:19:11 UTC
[hive] branch master updated: HIVE-22179 Break up
DDLSemanticAnalyzer - extract Function related analyzers (Miklos Gergely
reviewed by Jesus Camacho Rodriguez)
This is an automated email from the ASF dual-hosted git repository.
mgergely pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 5b1afb7 HIVE-22179 Break up DDLSemanticAnalyzer - extract Function related analyzers (Miklos Gergely reviewed by Jesus Camacho Rodriguez)
5b1afb7 is described below
commit 5b1afb78f4d2cf0ab7a7e83532a5557be2ec353d
Author: miklosgergely <mg...@cloudera.com>
AuthorDate: Thu Aug 29 14:11:20 2019 +0200
HIVE-22179 Break up DDLSemanticAnalyzer - extract Function related analyzers (Miklos Gergely reviewed by Jesus Camacho Rodriguez)
---
.../ql/ddl/function/AbstractFunctionAnalyzer.java | 81 ++++++++
.../function/create/CreateFunctionAnalyzer.java | 108 +++++++++++
.../function/{ => create}/CreateFunctionDesc.java | 2 +-
.../{ => create}/CreateFunctionOperation.java | 2 +-
.../hive/ql/ddl/function/create/package-info.java} | 21 +--
.../ql/ddl/function/desc/DescFunctionAnalyzer.java | 58 ++++++
.../ddl/function/{ => desc}/DescFunctionDesc.java | 8 +-
.../function/{ => desc}/DescFunctionOperation.java | 5 +-
.../hive/ql/ddl/function/desc/package-info.java} | 21 +--
.../ql/ddl/function/drop/DropFunctionAnalyzer.java | 67 +++++++
.../ddl/function/{ => drop}/DropFunctionDesc.java | 2 +-
.../function/{ => drop}/DropFunctionOperation.java | 2 +-
.../hive/ql/ddl/function/drop/package-info.java} | 21 +--
.../function/macro/create/CreateMacroAnalyzer.java | 139 ++++++++++++++
.../{ => macro/create}/CreateMacroDesc.java | 2 +-
.../{ => macro/create}/CreateMacroOperation.java | 2 +-
.../ddl/function/macro/create/package-info.java} | 21 +--
.../ddl/function/macro/drop/DropMacroAnalyzer.java | 67 +++++++
.../function/{ => macro/drop}/DropMacroDesc.java | 2 +-
.../{ => macro/drop}/DropMacroOperation.java | 2 +-
.../ql/ddl/function/macro/drop/package-info.java} | 21 +--
.../function/reload/ReloadFunctionsAnalyzer.java | 43 +++++
.../function/{ => reload}/ReloadFunctionsDesc.java | 2 +-
.../{ => reload}/ReloadFunctionsOperation.java | 2 +-
.../hive/ql/ddl/function/reload/package-info.java} | 21 +--
.../ddl/function/show/ShowFunctionsAnalyzer.java | 59 ++++++
.../ddl/function/{ => show}/ShowFunctionsDesc.java | 2 +-
.../{ => show}/ShowFunctionsOperation.java | 2 +-
.../hive/ql/ddl/function/show/package-info.java} | 21 +--
.../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java | 62 -------
.../hive/ql/parse/FunctionSemanticAnalyzer.java | 204 ---------------------
.../hive/ql/parse/MacroSemanticAnalyzer.java | 175 ------------------
.../hive/ql/parse/SemanticAnalyzerFactory.java | 11 --
.../repl/load/message/CreateFunctionHandler.java | 2 +-
.../repl/load/message/DropFunctionHandler.java | 2 +-
.../hive/ql/parse/TestMacroSemanticAnalyzer.java | 21 ++-
.../hive/ql/parse/TestSemanticAnalyzerFactory.java | 13 +-
.../hadoop/hive/ql/plan/TestCreateMacroDesc.java | 2 +-
.../hadoop/hive/ql/plan/TestDropMacroDesc.java | 2 +-
39 files changed, 684 insertions(+), 616 deletions(-)
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/AbstractFunctionAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/AbstractFunctionAnalyzer.java
new file mode 100644
index 0000000..997cb97
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/AbstractFunctionAnalyzer.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Abstract ancestor of function related ddl analyzer classes.
+ * Provides the shared logic for registering the write entities that the
+ * authorization layer uses to restrict function DDL to privileged users.
+ */
+public abstract class AbstractFunctionAnalyzer extends BaseSemanticAnalyzer {
+ public AbstractFunctionAnalyzer(QueryState queryState) throws SemanticException {
+ super(queryState);
+ }
+
+ /**
+ * Add write entities to the semantic analyzer to restrict function creation to privileged users.
+ *
+ * @param functionName name of the function, possibly qualified as "db.function" for permanent functions
+ * @param className fully qualified name of the class implementing the function
+ * @param isTemporary whether the function is session-scoped (temporary functions have no database namespace)
+ * @param resources resource URIs (jar/file/archive) from the USING clause; may be null
+ * @throws SemanticException if the database of a permanent function cannot be resolved
+ */
+ protected void addEntities(String functionName, String className, boolean isTemporary,
+ List<ResourceUri> resources) throws SemanticException {
+ // If the function is being added under a database 'namespace', then add an entity representing
+ // the database (only applicable to permanent/metastore functions).
+ // We also add a second entity representing the function name.
+ // The authorization api implementation can decide which entities it wants to use to
+ // authorize the create/drop function call.
+
+ // Add the relevant database 'namespace' as a WriteEntity
+ Database database = null;
+
+ // temporary functions don't have any database 'namespace' associated with it
+ if (!isTemporary) {
+ try {
+ // splits "db.function" (or resolves the current db) into its parts
+ String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
+ String databaseName = qualifiedNameParts[0];
+ functionName = qualifiedNameParts[1];
+ database = getDatabase(databaseName);
+ } catch (HiveException e) {
+ LOG.error("Failed to get database ", e);
+ throw new SemanticException(e);
+ }
+ }
+ if (database != null) {
+ outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
+ }
+
+ // Add the function name as a WriteEntity (database is null here for temporary functions)
+ outputs.add(new WriteEntity(database, functionName, className, Type.FUNCTION, WriteEntity.WriteType.DDL_NO_LOCK));
+
+ // each resource URI from the USING clause also becomes a write entity
+ if (resources != null) {
+ for (ResourceUri resource : resources) {
+ String uriPath = resource.getUri();
+ outputs.add(toWriteEntity(uriPath));
+ }
+ }
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionAnalyzer.java
new file mode 100644
index 0000000..40dc4dc
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionAnalyzer.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function.create;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.function.AbstractFunctionAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.ImmutableMap;
+
+/**
+ * Analyzer for function creation commands.
+ * Registered with the DDL analyzer factory for the CREATE FUNCTION token.
+ */
+@DDLType(type=HiveParser.TOK_CREATEFUNCTION)
+public class CreateFunctionAnalyzer extends AbstractFunctionAnalyzer {
+ // logs into the "SessionState" logger so the warning surfaces in the user session console
+ private static final Logger SESSION_STATE_LOG = LoggerFactory.getLogger("SessionState");
+
+ public CreateFunctionAnalyzer(QueryState queryState) throws SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ // child 0: function name, child 1: implementing class name
+ String functionName = root.getChild(0).getText().toLowerCase();
+ boolean isTemporary = (root.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null);
+ if (isTemporary && FunctionUtils.isQualifiedFunctionName(functionName)) {
+ throw new SemanticException("Temporary function cannot be created with a qualified name.");
+ }
+
+ String className = unescapeSQLString(root.getChild(1).getText());
+
+ // resources == null means the statement had no USING clause
+ List<ResourceUri> resources = getResourceList(root);
+ if (!isTemporary && resources == null) {
+ SESSION_STATE_LOG.warn("permanent functions created without USING clause will not be replicated.");
+ }
+
+ CreateFunctionDesc desc = new CreateFunctionDesc(functionName, className, isTemporary, resources, null);
+ rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+ // register write entities so authorization can gate the creation
+ addEntities(functionName, className, isTemporary, resources);
+ }
+
+ // maps parser resource-type tokens to their metastore resource type
+ private static final Map<Integer, ResourceType> TOKEN_TYPE_TO_RESOURCE_TYPE = ImmutableMap.of(
+ HiveParser.TOK_JAR, ResourceType.JAR,
+ HiveParser.TOK_FILE, ResourceType.FILE,
+ HiveParser.TOK_ARCHIVE, ResourceType.ARCHIVE);
+
+ /**
+ * Extract the resource URIs of the USING clause from the AST.
+ *
+ * @param ast the CREATE FUNCTION node
+ * @return the list of resources, or null if the statement has no TOK_RESOURCE_LIST child
+ * @throws SemanticException if the resource list subtree is malformed
+ */
+ private List<ResourceUri> getResourceList(ASTNode ast) throws SemanticException {
+ List<ResourceUri> resources = null;
+
+ ASTNode resourcesNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST);
+ if (resourcesNode != null) {
+ resources = new ArrayList<ResourceUri>();
+ for (int idx = 0; idx < resourcesNode.getChildCount(); ++idx) {
+ // ^(TOK_RESOURCE_URI $resType $resPath)
+ ASTNode node = (ASTNode) resourcesNode.getChild(idx);
+ if (node.getToken().getType() != HiveParser.TOK_RESOURCE_URI) {
+ throw new SemanticException("Expected token type TOK_RESOURCE_URI but found " + node.getToken().toString());
+ }
+ if (node.getChildCount() != 2) {
+ throw new SemanticException("Expected 2 child nodes of TOK_RESOURCE_URI but found " + node.getChildCount());
+ }
+
+ ASTNode resourceTypeNode = (ASTNode) node.getChild(0);
+ ASTNode resourceUriNode = (ASTNode) node.getChild(1);
+ ResourceType resourceType = TOKEN_TYPE_TO_RESOURCE_TYPE.get(resourceTypeNode.getType());
+ if (resourceType == null) {
+ throw new SemanticException("Unexpected token " + resourceTypeNode);
+ }
+
+ resources.add(new ResourceUri(resourceType, PlanUtils.stripQuotes(resourceUriNode.getText())));
+ }
+ }
+
+ return resources;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionDesc.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionDesc.java
index 9e21aeb..dbec148 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionDesc.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.create;
import java.io.Serializable;
import java.util.List;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionOperation.java
similarity index 99%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionOperation.java
index 995beed..9489675 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/CreateFunctionOperation.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.create;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/package-info.java
similarity index 62%
copy from ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/package-info.java
index 5d553b4..ebfa2d6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/create/package-info.java
@@ -15,23 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.plan;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
-import org.junit.Assert;
-
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestDropMacroDesc {
- private String name;
- @Before
- public void setup() throws Exception {
- name = "fixed_number";
- }
- @Test
- public void testCreateMacroDesc() throws Exception {
- DropMacroDesc desc = new DropMacroDesc(name);
- Assert.assertEquals(name, desc.getName());
- }
-}
+/** Function creation DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.function.create;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionAnalyzer.java
new file mode 100644
index 0000000..50454f4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionAnalyzer.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function.desc;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.function.AbstractFunctionAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for function describing commands.
+ * Registered with the DDL analyzer factory for the DESCRIBE FUNCTION token.
+ */
+@DDLType(type=HiveParser.TOK_DESCFUNCTION)
+public class DescFunctionAnalyzer extends AbstractFunctionAnalyzer {
+ public DescFunctionAnalyzer(QueryState queryState) throws SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ // the operation writes its output to a local temp file which the fetch task reads back
+ ctx.setResFile(ctx.getLocalTmpPath());
+
+ if (root.getChildCount() < 1 || root.getChildCount() > 2) {
+ throw new SemanticException("Unexpected Tokens at DESCRIBE FUNCTION");
+ }
+
+ String functionName = stripQuotes(root.getChild(0).getText());
+ // a second child is present when the EXTENDED keyword was given
+ boolean isExtended = root.getChildCount() == 2;
+
+ DescFunctionDesc desc = new DescFunctionDesc(ctx.getResFile(), functionName, isExtended);
+ Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+ rootTasks.add(task);
+
+ // wire up the fetch task so the client can read the description back as a result set
+ task.setFetchSource(true);
+ setFetchTask(createFetchTask(DescFunctionDesc.SCHEMA));
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionDesc.java
similarity index 92%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionDesc.java
index 8adbc5b..892ec99 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionDesc.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.desc;
import java.io.Serializable;
@@ -34,18 +34,18 @@ public class DescFunctionDesc implements DDLDesc, Serializable {
public static final String SCHEMA = "tab_name#string";
- private final String resFile;
+ private final Path resFile;
private final String name;
private final boolean isExtended;
public DescFunctionDesc(Path resFile, String name, boolean isExtended) {
- this.resFile = resFile.toString();
+ this.resFile = resFile;
this.name = name;
this.isExtended = isExtended;
}
@Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
- public String getResFile() {
+ public Path getResFile() {
return resFile;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionOperation.java
similarity index 95%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionOperation.java
index 5f0c977..6a94a93 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/DescFunctionOperation.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.desc;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.DDLUtils;
@@ -32,7 +32,6 @@ import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Set;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -48,7 +47,7 @@ public class DescFunctionOperation extends DDLOperation<DescFunctionDesc> {
@Override
public int execute() throws HiveException {
- try (DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context)) {
+ try (DataOutputStream outStream = DDLUtils.getOutputStream(desc.getResFile(), context)) {
String funcName = desc.getName();
FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(funcName);
Class<?> funcClass = functionInfo == null ? null : functionInfo.getFunctionClass();
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/package-info.java
similarity index 62%
copy from ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/package-info.java
index 5d553b4..7cddc52 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/desc/package-info.java
@@ -15,23 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.plan;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
-import org.junit.Assert;
-
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestDropMacroDesc {
- private String name;
- @Before
- public void setup() throws Exception {
- name = "fixed_number";
- }
- @Test
- public void testCreateMacroDesc() throws Exception {
- DropMacroDesc desc = new DropMacroDesc(name);
- Assert.assertEquals(name, desc.getName());
- }
-}
+/** Function describing DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.function.desc;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionAnalyzer.java
new file mode 100644
index 0000000..7ab172a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionAnalyzer.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function.drop;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.function.AbstractFunctionAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for function dropping commands.
+ * Registered with the DDL analyzer factory for the DROP FUNCTION token.
+ */
+@DDLType(type=HiveParser.TOK_DROPFUNCTION)
+public class DropFunctionAnalyzer extends AbstractFunctionAnalyzer {
+ public DropFunctionAnalyzer(QueryState queryState) throws SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ String functionName = root.getChild(0).getText();
+ boolean ifExists = (root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
+ // missing functions are tolerated when IF EXISTS was given or the config says to ignore them
+ boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
+ boolean isTemporary = (root.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null);
+
+ FunctionInfo info = FunctionRegistry.getFunctionInfo(functionName);
+ if (info == null) {
+ if (throwException) {
+ throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
+ } else {
+ return; // Fail silently
+ }
+ } else if (info.isBuiltIn()) {
+ // built-in functions can never be dropped
+ throw new SemanticException(ErrorMsg.DROP_NATIVE_FUNCTION.getMsg(functionName));
+ }
+
+ DropFunctionDesc desc = new DropFunctionDesc(functionName, isTemporary, null);
+ rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+ // register write entities so authorization can gate the drop
+ addEntities(functionName, info.getClassName(), isTemporary, null);
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionDesc.java
index bc952e7..f53f517 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionDesc.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.drop;
import java.io.Serializable;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionOperation.java
index fae8583..68602f7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/DropFunctionOperation.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.drop;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/package-info.java
similarity index 62%
copy from ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/package-info.java
index 5d553b4..6a2904e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/drop/package-info.java
@@ -15,23 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.plan;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
-import org.junit.Assert;
-
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestDropMacroDesc {
- private String name;
- @Before
- public void setup() throws Exception {
- name = "fixed_number";
- }
- @Test
- public void testCreateMacroDesc() throws Exception {
- DropMacroDesc desc = new DropMacroDesc(name);
- Assert.assertEquals(name, desc.getName());
- }
-}
+/** Function dropping DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.function.drop;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroAnalyzer.java
new file mode 100644
index 0000000..4eed5c9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroAnalyzer.java
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function.macro.create;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.Stack;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.lib.Dispatcher;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * Analyzer for macro creation commands.
+ * Registered with the DDL analyzer factory for the CREATE MACRO token.
+ */
+@DDLType(type=HiveParser.TOK_CREATEMACRO)
+public class CreateMacroAnalyzer extends BaseSemanticAnalyzer {
+ public CreateMacroAnalyzer(QueryState queryState) throws SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ // child 0: macro name, child 1: argument list, child 2: body expression (when arguments exist)
+ String macroName = root.getChild(0).getText();
+ if (FunctionUtils.isQualifiedFunctionName(macroName)) {
+ throw new SemanticException("Temporary macro cannot be created with a qualified name.");
+ }
+
+ List<FieldSchema> arguments = getColumns((ASTNode)root.getChild(1), true, conf);
+ Set<String> actualColumnNames = getActualColumnNames(root, arguments);
+
+ RowResolver rowResolver = new RowResolver();
+ ArrayList<String> macroColumnNames = new ArrayList<String>(arguments.size());
+ ArrayList<TypeInfo> macroColumnTypes = new ArrayList<TypeInfo>(arguments.size());
+
+ getMacroColumnData(arguments, actualColumnNames, rowResolver, macroColumnNames, macroColumnTypes);
+ ExprNodeDesc body = getBody(root, arguments, rowResolver);
+
+ CreateMacroDesc desc = new CreateMacroDesc(macroName, macroColumnNames, macroColumnTypes, body);
+ rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+ Database database = getDatabase(Warehouse.DEFAULT_DATABASE_NAME);
+ // This restricts macro creation to privileged users.
+ outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
+ }
+
+ /**
+ * Collect the column names that are actually referenced in the macro body expression,
+ * by walking the expression subtree for TOK_TABLE_OR_COL nodes.
+ */
+ private Set<String> getActualColumnNames(ASTNode root, List<FieldSchema> arguments) throws SemanticException {
+ final Set<String> actualColumnNames = new HashSet<String>();
+
+ if (!arguments.isEmpty()) {
+ // Walk down expression to see which arguments are actually used.
+ Node expression = (Node) root.getChild(2);
+ PreOrderWalker walker = new PreOrderWalker(new Dispatcher() {
+ @Override
+ public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException {
+ if (nd instanceof ASTNode) {
+ ASTNode node = (ASTNode)nd;
+ if (node.getType() == HiveParser.TOK_TABLE_OR_COL) {
+ actualColumnNames.add(node.getChild(0).getText());
+ }
+ }
+ return null;
+ }
+ });
+ walker.startWalking(Collections.singletonList(expression), null);
+ }
+
+ return actualColumnNames;
+ }
+
+ /**
+ * Populate the row resolver and the output name/type lists from the declared arguments,
+ * and verify that the declared and actually used column name sets match exactly.
+ */
+ private void getMacroColumnData(List<FieldSchema> arguments, Set<String> actualColumnNames, RowResolver rowResolver,
+ ArrayList<String> macroColumnNames, ArrayList<TypeInfo> macroColumnTypes) throws SemanticException {
+ for (FieldSchema argument : arguments) {
+ TypeInfo columnType = TypeInfoUtils.getTypeInfoFromTypeString(argument.getType());
+ rowResolver.put("", argument.getName(), new ColumnInfo(argument.getName(), columnType, "", false));
+ macroColumnNames.add(argument.getName());
+ macroColumnTypes.add(columnType);
+ }
+ // LinkedHashSet preserves declaration order in the error message; also deduplicates
+ Set<String> expectedColumnNames = new LinkedHashSet<String>(macroColumnNames);
+ if (!expectedColumnNames.equals(actualColumnNames)) {
+ throw new SemanticException("Expected columns " + expectedColumnNames + " but found " + actualColumnNames);
+ }
+ // a size mismatch after deduplication means a parameter name was declared twice
+ if (expectedColumnNames.size() != macroColumnNames.size()) {
+ throw new SemanticException("At least one parameter name was used more than once " + macroColumnNames);
+ }
+ }
+
+ /**
+ * Compile the macro body into an expression descriptor. With no arguments the body is
+ * child 1 of the AST; with arguments child 1 is the argument list and the body is child 2.
+ */
+ private ExprNodeDesc getBody(ASTNode root, List<FieldSchema> arguments, RowResolver rowResolver)
+ throws SemanticException {
+ // the planner flavor must follow the CBO setting so expression resolution matches query compilation
+ SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ?
+ new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);
+
+ ExprNodeDesc body = arguments.isEmpty() ?
+ sa.genExprNodeDesc((ASTNode)root.getChild(1), rowResolver) :
+ sa.genExprNodeDesc((ASTNode)root.getChild(2), rowResolver);
+ return body;
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroDesc.java
index ba1b82e..181254a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroDesc.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.macro.create;
import java.io.Serializable;
import java.util.List;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroOperation.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroOperation.java
index c7787db..9b208d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/CreateMacroOperation.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.macro.create;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/package-info.java
similarity index 62%
copy from ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/package-info.java
index 5d553b4..30094f6 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/create/package-info.java
@@ -15,23 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.plan;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
-import org.junit.Assert;
-
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestDropMacroDesc {
- private String name;
- @Before
- public void setup() throws Exception {
- name = "fixed_number";
- }
- @Test
- public void testCreateMacroDesc() throws Exception {
- DropMacroDesc desc = new DropMacroDesc(name);
- Assert.assertEquals(name, desc.getName());
- }
-}
+/** Macro creation DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.function.macro.create;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroAnalyzer.java
new file mode 100644
index 0000000..15ce475
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroAnalyzer.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function.macro.drop;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for macro dropping commands.
+ */
+@DDLType(type=HiveParser.TOK_DROPMACRO)
+public class DropMacroAnalyzer extends BaseSemanticAnalyzer {
+ public DropMacroAnalyzer(QueryState queryState) throws SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ String macroName = root.getChild(0).getText();
+ if (FunctionUtils.isQualifiedFunctionName(macroName)) {
+ throw new SemanticException("Temporary macro name cannot be a qualified name.");
+ }
+
+ boolean ifExists = (root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
+ boolean throwException = !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
+ if (throwException && FunctionRegistry.getFunctionInfo(macroName) == null) {
+ throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(macroName));
+ }
+
+ DropMacroDesc desc = new DropMacroDesc(macroName);
+ rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+ Database database = getDatabase(Warehouse.DEFAULT_DATABASE_NAME);
+ // This restricts macro dropping to privileged users.
+ outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroDesc.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroDesc.java
index a09bfb4..a4d0ba5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroDesc.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.macro.drop;
import java.io.Serializable;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroOperation.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroOperation.java
index f7e5acb..ac742d4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/DropMacroOperation.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.macro.drop;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/package-info.java
similarity index 62%
copy from ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/package-info.java
index 5d553b4..887acb0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/macro/drop/package-info.java
@@ -15,23 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.plan;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
-import org.junit.Assert;
-
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestDropMacroDesc {
- private String name;
- @Before
- public void setup() throws Exception {
- name = "fixed_number";
- }
- @Test
- public void testCreateMacroDesc() throws Exception {
- DropMacroDesc desc = new DropMacroDesc(name);
- Assert.assertEquals(name, desc.getName());
- }
-}
+/** Macro dropping DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.function.macro.drop;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsAnalyzer.java
new file mode 100644
index 0000000..06c6622
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsAnalyzer.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function.reload;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for reloading functions commands.
+ */
+@DDLType(type=HiveParser.TOK_RELOADFUNCTIONS)
+public class ReloadFunctionsAnalyzer extends BaseSemanticAnalyzer {
+ public ReloadFunctionsAnalyzer(QueryState queryState) throws SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), new ReloadFunctionsDesc())));
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsDesc.java
similarity index 95%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsDesc.java
index f87ee55..9eef23d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsDesc.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.reload;
import java.io.Serializable;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsOperation.java
similarity index 96%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsOperation.java
index 2ab119a..8af68a1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/ReloadFunctionsOperation.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.reload;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/package-info.java
similarity index 62%
copy from ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/package-info.java
index 5d553b4..51fd37b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/reload/package-info.java
@@ -15,23 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.plan;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
-import org.junit.Assert;
-
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestDropMacroDesc {
- private String name;
- @Before
- public void setup() throws Exception {
- name = "fixed_number";
- }
- @Test
- public void testCreateMacroDesc() throws Exception {
- DropMacroDesc desc = new DropMacroDesc(name);
- Assert.assertEquals(name, desc.getName());
- }
-}
+/** Function reloading DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.function.reload;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsAnalyzer.java
new file mode 100644
index 0000000..3926585
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsAnalyzer.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function.show;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for showing functions commands.
+ */
+@DDLType(type=HiveParser.TOK_SHOWFUNCTIONS)
+public class ShowFunctionsAnalyzer extends BaseSemanticAnalyzer {
+ public ShowFunctionsAnalyzer(QueryState queryState) throws SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ ctx.setResFile(ctx.getLocalTmpPath());
+
+ ShowFunctionsDesc desc;
+ if (root.getChildCount() > 0) {
+ assert (root.getChildCount() == 2);
+ assert (root.getChild(0).getType() == HiveParser.KW_LIKE);
+ String functionNames = stripQuotes(root.getChild(1).getText());
+ desc = new ShowFunctionsDesc(ctx.getResFile(), functionNames);
+ } else {
+ desc = new ShowFunctionsDesc(ctx.getResFile());
+ }
+ Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+ rootTasks.add(task);
+
+ task.setFetchSource(true);
+ setFetchTask(createFetchTask(ShowFunctionsDesc.SCHEMA));
+ }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsDesc.java
similarity index 97%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsDesc.java
index 9784b1a..dc77233 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsDesc.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.show;
import java.io.Serializable;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsOperation.java
similarity index 98%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsOperation.java
index 14a8f92..b94fee5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/ShowFunctionsOperation.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.ddl.function;
+package org.apache.hadoop.hive.ql.ddl.function.show;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.ddl.DDLUtils;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/package-info.java
similarity index 62%
copy from ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/package-info.java
index 5d553b4..d69d2b1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/show/package-info.java
@@ -15,23 +15,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hive.ql.plan;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
-import org.junit.Assert;
-
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestDropMacroDesc {
- private String name;
- @Before
- public void setup() throws Exception {
- name = "fixed_number";
- }
- @Test
- public void testCreateMacroDesc() throws Exception {
- DropMacroDesc desc = new DropMacroDesc(name);
- Assert.assertEquals(name, desc.getName());
- }
-}
+/** Showing functions DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.function.show;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 41a51bf..4ef33dc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -70,8 +70,6 @@ import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.function.DescFunctionDesc;
-import org.apache.hadoop.hive.ql.ddl.function.ShowFunctionsDesc;
import org.apache.hadoop.hive.ql.ddl.misc.CacheMetadataDesc;
import org.apache.hadoop.hive.ql.ddl.misc.MsckDesc;
import org.apache.hadoop.hive.ql.ddl.misc.ShowConfDesc;
@@ -409,10 +407,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
ctx.setResFile(ctx.getLocalTmpPath());
analyzeShowTableProperties(ast);
break;
- case HiveParser.TOK_SHOWFUNCTIONS:
- ctx.setResFile(ctx.getLocalTmpPath());
- analyzeShowFunctions(ast);
- break;
case HiveParser.TOK_SHOWLOCKS:
ctx.setResFile(ctx.getLocalTmpPath());
analyzeShowLocks(ast);
@@ -447,10 +441,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
ctx.setResFile(ctx.getLocalTmpPath());
analyzeShowMaterializedViews(ast);
break;
- case HiveParser.TOK_DESCFUNCTION:
- ctx.setResFile(ctx.getLocalTmpPath());
- analyzeDescFunction(ast);
- break;
case HiveParser.TOK_MSCK:
ctx.setResFile(ctx.getLocalTmpPath());
analyzeMetastoreCheck(ast);
@@ -2546,29 +2536,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
/**
* Add the task according to the parsed command tree. This is used for the CLI
- * command "SHOW FUNCTIONS;".
- *
- * @param ast
- * The parsed command tree.
- * @throws SemanticException
- * Parsin failed
- */
- private void analyzeShowFunctions(ASTNode ast) throws SemanticException {
- ShowFunctionsDesc showFuncsDesc;
- if (ast.getChildCount() > 0) {
- assert (ast.getChildCount() == 2);
- assert (ast.getChild(0).getType() == HiveParser.KW_LIKE);
- String funcNames = stripQuotes(ast.getChild(1).getText());
- showFuncsDesc = new ShowFunctionsDesc(ctx.getResFile(), funcNames);
- } else {
- showFuncsDesc = new ShowFunctionsDesc(ctx.getResFile());
- }
- rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showFuncsDesc)));
- setFetchTask(createFetchTask(ShowFunctionsDesc.SCHEMA));
- }
-
- /**
- * Add the task according to the parsed command tree. This is used for the CLI
* command "SHOW LOCKS;".
*
* @param ast
@@ -2855,35 +2822,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
ctx.setNeedLockMgr(true);
}
- /**
- * Add the task according to the parsed command tree. This is used for the CLI
- * command "DESCRIBE FUNCTION;".
- *
- * @param ast
- * The parsed command tree.
- * @throws SemanticException
- * Parsing failed
- */
- private void analyzeDescFunction(ASTNode ast) throws SemanticException {
- String funcName;
- boolean isExtended;
-
- if (ast.getChildCount() == 1) {
- funcName = stripQuotes(ast.getChild(0).getText());
- isExtended = false;
- } else if (ast.getChildCount() == 2) {
- funcName = stripQuotes(ast.getChild(0).getText());
- isExtended = true;
- } else {
- throw new SemanticException("Unexpected Tokens at DESCRIBE FUNCTION");
- }
-
- DescFunctionDesc descFuncDesc = new DescFunctionDesc(ctx.getResFile(), funcName, isExtended);
- rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), descFuncDesc)));
- setFetchTask(createFetchTask(DescFunctionDesc.SCHEMA));
- }
-
-
private void analyzeAlterTableRename(String[] source, ASTNode ast, boolean expectView)
throws SemanticException {
String[] target = getQualifiedTableName((ASTNode) ast.getChild(0));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
deleted file mode 100644
index 8dd5674..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.parse;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.ResourceType;
-import org.apache.hadoop.hive.metastore.api.ResourceUri;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.QueryState;
-import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.function.CreateFunctionDesc;
-import org.apache.hadoop.hive.ql.ddl.function.DropFunctionDesc;
-import org.apache.hadoop.hive.ql.ddl.function.ReloadFunctionsDesc;
-import org.apache.hadoop.hive.ql.exec.FunctionInfo;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.FunctionUtils;
-import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.hooks.Entity.Type;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.PlanUtils;
-
-/**
- * FunctionSemanticAnalyzer.
- *
- */
-public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
- private static final Logger LOG = LoggerFactory.getLogger(FunctionSemanticAnalyzer.class);
- private static final Logger SESISON_STATE_LOG= LoggerFactory.getLogger("SessionState");
-
- public FunctionSemanticAnalyzer(QueryState queryState) throws SemanticException {
- super(queryState);
- }
-
- @Override
- public void analyzeInternal(ASTNode ast) throws SemanticException {
- if (ast.getType() == HiveParser.TOK_CREATEFUNCTION) {
- analyzeCreateFunction(ast);
- } else if (ast.getType() == HiveParser.TOK_DROPFUNCTION) {
- analyzeDropFunction(ast);
- } else if (ast.getType() == HiveParser.TOK_RELOADFUNCTIONS) {
- rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), new ReloadFunctionsDesc())));
- }
-
- LOG.info("analyze done");
- }
-
- private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
- // ^(TOK_CREATEFUNCTION identifier StringLiteral ({isTempFunction}? => TOK_TEMPORARY))
- String functionName = ast.getChild(0).getText().toLowerCase();
- boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null);
- String className = unescapeSQLString(ast.getChild(1).getText());
-
- // Temp functions are not allowed to have qualified names.
- if (isTemporaryFunction && FunctionUtils.isQualifiedFunctionName(functionName)) {
- throw new SemanticException("Temporary function cannot be created with a qualified name.");
- }
-
- // find any referenced resources
- List<ResourceUri> resources = getResourceList(ast);
- if (!isTemporaryFunction && resources == null) {
- SESISON_STATE_LOG.warn("permanent functions created without USING clause will not be replicated.");
- }
-
- CreateFunctionDesc desc =
- new CreateFunctionDesc(functionName, className, isTemporaryFunction, resources, null);
- rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-
- addEntities(functionName, className, isTemporaryFunction, resources);
- }
-
- private void analyzeDropFunction(ASTNode ast) throws SemanticException {
- // ^(TOK_DROPFUNCTION identifier ifExists? $temp?)
- String functionName = ast.getChild(0).getText();
- boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
- // we want to signal an error if the function doesn't exist and we're
- // configured not to ignore this
- boolean throwException =
- !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
-
- FunctionInfo info = FunctionRegistry.getFunctionInfo(functionName);
- if (info == null) {
- if (throwException) {
- throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
- } else {
- // Fail silently
- return;
- }
- } else if (info.isBuiltIn()) {
- throw new SemanticException(ErrorMsg.DROP_NATIVE_FUNCTION.getMsg(functionName));
- }
-
- boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null);
- DropFunctionDesc desc = new DropFunctionDesc(functionName, isTemporaryFunction, null);
- rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-
- addEntities(functionName, info.getClassName(), isTemporaryFunction, null);
- }
-
- private ResourceType getResourceType(ASTNode token) throws SemanticException {
- switch (token.getType()) {
- case HiveParser.TOK_JAR:
- return ResourceType.JAR;
- case HiveParser.TOK_FILE:
- return ResourceType.FILE;
- case HiveParser.TOK_ARCHIVE:
- return ResourceType.ARCHIVE;
- default:
- throw new SemanticException("Unexpected token " + token.toString());
- }
- }
-
- private List<ResourceUri> getResourceList(ASTNode ast) throws SemanticException {
- List<ResourceUri> resources = null;
- ASTNode resourcesNode = (ASTNode) ast.getFirstChildWithType(HiveParser.TOK_RESOURCE_LIST);
-
- if (resourcesNode != null) {
- resources = new ArrayList<ResourceUri>();
- for (int idx = 0; idx < resourcesNode.getChildCount(); ++idx) {
- // ^(TOK_RESOURCE_URI $resType $resPath)
- ASTNode resNode = (ASTNode) resourcesNode.getChild(idx);
- if (resNode.getToken().getType() != HiveParser.TOK_RESOURCE_URI) {
- throw new SemanticException("Expected token type TOK_RESOURCE_URI but found "
- + resNode.getToken().toString());
- }
- if (resNode.getChildCount() != 2) {
- throw new SemanticException("Expected 2 child nodes of TOK_RESOURCE_URI but found "
- + resNode.getChildCount());
- }
- ASTNode resTypeNode = (ASTNode) resNode.getChild(0);
- ASTNode resUriNode = (ASTNode) resNode.getChild(1);
- ResourceType resourceType = getResourceType(resTypeNode);
- resources.add(new ResourceUri(resourceType, PlanUtils.stripQuotes(resUriNode.getText())));
- }
- }
-
- return resources;
- }
-
- /**
- * Add write entities to the semantic analyzer to restrict function creation to privileged users.
- */
- private void addEntities(String functionName, String className, boolean isTemporaryFunction,
- List<ResourceUri> resources) throws SemanticException {
- // If the function is being added under a database 'namespace', then add an entity representing
- // the database (only applicable to permanent/metastore functions).
- // We also add a second entity representing the function name.
- // The authorization api implementation can decide which entities it wants to use to
- // authorize the create/drop function call.
-
- // Add the relevant database 'namespace' as a WriteEntity
- Database database = null;
-
- // temporary functions don't have any database 'namespace' associated with it,
- // it matters only for permanent functions
- if (!isTemporaryFunction) {
- try {
- String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
- String dbName = qualifiedNameParts[0];
- functionName = qualifiedNameParts[1];
- database = getDatabase(dbName);
- } catch (HiveException e) {
- LOG.error("Failed to get database ", e);
- throw new SemanticException(e);
- }
- }
- if (database != null) {
- outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
- }
-
- // Add the function name as a WriteEntity
- outputs.add(new WriteEntity(database, functionName, className, Type.FUNCTION,
- WriteEntity.WriteType.DDL_NO_LOCK));
-
- if (resources != null) {
- for (ResourceUri resource : resources) {
- String uriPath = resource.getUri();
- outputs.add(toWriteEntity(uriPath));
- }
- }
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
deleted file mode 100644
index e79512e..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.parse;
-
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFEXISTS;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.Stack;
-import java.util.LinkedHashSet;
-
-import org.apache.hadoop.hive.metastore.Warehouse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.QueryState;
-import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.function.CreateMacroDesc;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.FunctionUtils;
-import org.apache.hadoop.hive.ql.exec.TaskFactory;
-import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-
-/**
- * MacroSemanticAnalyzer.
- *
- */
-public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
- private static final Logger LOG = LoggerFactory
- .getLogger(MacroSemanticAnalyzer.class);
-
- public MacroSemanticAnalyzer(QueryState queryState) throws SemanticException {
- super(queryState);
- }
-
- @Override
- public void analyzeInternal(ASTNode ast) throws SemanticException {
- if (ast.getToken().getType() == HiveParser.TOK_CREATEMACRO) {
- LOG.debug("Analyzing create macro " + ast.dump());
- analyzeCreateMacro(ast);
- }
- if (ast.getToken().getType() == HiveParser.TOK_DROPMACRO) {
- LOG.debug("Analyzing drop macro " + ast.dump());
- analyzeDropMacro(ast);
- }
- }
-
- private void analyzeCreateMacro(ASTNode ast) throws SemanticException {
- String functionName = ast.getChild(0).getText();
-
- // Temp macros are not allowed to have qualified names.
- if (FunctionUtils.isQualifiedFunctionName(functionName)) {
- throw new SemanticException("Temporary macro cannot be created with a qualified name.");
- }
-
- List<FieldSchema> arguments =
- BaseSemanticAnalyzer.getColumns((ASTNode)ast.getChild(1), true, conf);
- boolean isNoArgumentMacro = arguments.size() == 0;
- RowResolver rowResolver = new RowResolver();
- ArrayList<String> macroColNames = new ArrayList<String>(arguments.size());
- ArrayList<TypeInfo> macroColTypes = new ArrayList<TypeInfo>(arguments.size());
- final Set<String> actualColumnNames = new HashSet<String>();
-
- if(!isNoArgumentMacro) {
- /*
- * Walk down expression to see which arguments are actually used.
- */
- Node expression = (Node) ast.getChild(2);
- PreOrderWalker walker = new PreOrderWalker(new Dispatcher() {
- @Override
- public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
- throws SemanticException {
- if(nd instanceof ASTNode) {
- ASTNode node = (ASTNode)nd;
- if(node.getType() == HiveParser.TOK_TABLE_OR_COL) {
- actualColumnNames.add(node.getChild(0).getText());
- }
- }
- return null;
- }
- });
- walker.startWalking(Collections.singletonList(expression), null);
- }
- for (FieldSchema argument : arguments) {
- TypeInfo colType =
- TypeInfoUtils.getTypeInfoFromTypeString(argument.getType());
- rowResolver.put("", argument.getName(),
- new ColumnInfo(argument.getName(), colType, "", false));
- macroColNames.add(argument.getName());
- macroColTypes.add(colType);
- }
- Set<String> expectedColumnNames = new LinkedHashSet<String>(macroColNames);
- if(!expectedColumnNames.equals(actualColumnNames)) {
- throw new SemanticException("Expected columns " + expectedColumnNames + " but found "
- + actualColumnNames);
- }
- if(expectedColumnNames.size() != macroColNames.size()) {
- throw new SemanticException("At least one parameter name was used more than once "
- + macroColNames);
- }
- SemanticAnalyzer sa = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED) ?
- new CalcitePlanner(queryState) : new SemanticAnalyzer(queryState);
- ;
- ExprNodeDesc body;
- if(isNoArgumentMacro) {
- body = sa.genExprNodeDesc((ASTNode)ast.getChild(1), rowResolver);
- } else {
- body = sa.genExprNodeDesc((ASTNode)ast.getChild(2), rowResolver);
- }
- CreateMacroDesc desc = new CreateMacroDesc(functionName, macroColNames, macroColTypes, body);
- rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-
- addEntities();
- }
-
- private void analyzeDropMacro(ASTNode ast) throws SemanticException {
- String functionName = ast.getChild(0).getText();
- boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
- // we want to signal an error if the function doesn't exist and we're
- // configured not to ignore this
- boolean throwException =
- !ifExists && !HiveConf.getBoolVar(conf, ConfVars.DROP_IGNORES_NON_EXISTENT);
-
- // Temp macros are not allowed to have qualified names.
- if (FunctionUtils.isQualifiedFunctionName(functionName)) {
- throw new SemanticException("Temporary macro name cannot be a qualified name.");
- }
-
- if (throwException && FunctionRegistry.getFunctionInfo(functionName) == null) {
- throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg(functionName));
- }
-
- DropMacroDesc desc = new DropMacroDesc(functionName);
- rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-
- addEntities();
- }
-
- private void addEntities() throws SemanticException {
- Database database = getDatabase(Warehouse.DEFAULT_DATABASE_NAME);
- // This restricts macro creation to privileged users.
- outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
- }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 763e996..3afead0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -300,14 +300,12 @@ public final class SemanticAnalyzerFactory {
case HiveParser.TOK_DROPVIEW:
case HiveParser.TOK_DROP_MATERIALIZED_VIEW:
case HiveParser.TOK_DESCTABLE:
- case HiveParser.TOK_DESCFUNCTION:
case HiveParser.TOK_MSCK:
case HiveParser.TOK_SHOWTABLES:
case HiveParser.TOK_SHOWCOLUMNS:
case HiveParser.TOK_SHOW_TABLESTATUS:
case HiveParser.TOK_SHOW_TBLPROPERTIES:
case HiveParser.TOK_SHOW_CREATETABLE:
- case HiveParser.TOK_SHOWFUNCTIONS:
case HiveParser.TOK_SHOWPARTITIONS:
case HiveParser.TOK_SHOWLOCKS:
case HiveParser.TOK_SHOWDBLOCKS:
@@ -349,18 +347,9 @@ public final class SemanticAnalyzerFactory {
case HiveParser.TOK_DROP_MAPPING:
return new DDLSemanticAnalyzer(queryState);
- case HiveParser.TOK_CREATEFUNCTION:
- case HiveParser.TOK_DROPFUNCTION:
- case HiveParser.TOK_RELOADFUNCTIONS:
- return new FunctionSemanticAnalyzer(queryState);
-
case HiveParser.TOK_ANALYZE:
return new ColumnStatsSemanticAnalyzer(queryState);
- case HiveParser.TOK_CREATEMACRO:
- case HiveParser.TOK_DROPMACRO:
- return new MacroSemanticAnalyzer(queryState);
-
case HiveParser.TOK_UPDATE_TABLE:
case HiveParser.TOK_DELETE_FROM:
return new UpdateDeleteSemanticAnalyzer(queryState);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
index bc891f7..ae66366 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.metastore.ReplChangeManager;
import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.function.CreateFunctionDesc;
+import org.apache.hadoop.hive.ql.ddl.function.create.CreateFunctionDesc;
import org.apache.hadoop.hive.ql.exec.FunctionUtils;
import org.apache.hadoop.hive.ql.exec.ReplCopyTask;
import org.apache.hadoop.hive.ql.exec.Task;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
index 11203f1..167679f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hive.ql.parse.repl.load.message;
import org.apache.hadoop.hive.metastore.messaging.DropFunctionMessage;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
-import org.apache.hadoop.hive.ql.ddl.function.DropFunctionDesc;
+import org.apache.hadoop.hive.ql.ddl.function.drop.DropFunctionDesc;
import org.apache.hadoop.hive.ql.exec.FunctionUtils;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index fff0a3d..0030649 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -35,7 +36,6 @@ import org.junit.Assert;
public class TestMacroSemanticAnalyzer {
- private MacroSemanticAnalyzer analyzer;
private QueryState queryState;
private HiveConf conf;
private Context context;
@@ -46,13 +46,14 @@ public class TestMacroSemanticAnalyzer {
conf = queryState.getConf();
SessionState.start(conf);
context = new Context(conf);
- analyzer = new MacroSemanticAnalyzer(queryState);
}
private ASTNode parse(String command) throws Exception {
return ParseUtils.parse(command);
}
+
private void analyze(ASTNode ast) throws Exception {
+ BaseSemanticAnalyzer analyzer = DDLSemanticAnalyzerFactory.getAnalyzer(ast, queryState);
analyzer.analyze(ast, context);
List<Task<?>> rootTasks = analyzer.getRootTasks();
Assert.assertEquals(1, rootTasks.size());
@@ -66,71 +67,87 @@ public class TestMacroSemanticAnalyzer {
public void testDropMacroDoesNotExist() throws Exception {
analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
}
+
@Test
public void testDropMacroExistsDoNotIgnoreErrors() throws Exception {
conf.setBoolVar(ConfVars.DROP_IGNORES_NON_EXISTENT, false);
FunctionRegistry.registerTemporaryUDF("SOME_MACRO", GenericUDFMacro.class);
analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
}
+
@Test
public void testDropMacro() throws Exception {
FunctionRegistry.registerTemporaryUDF("SOME_MACRO", GenericUDFMacro.class);
analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
}
+
@Test(expected = SemanticException.class)
public void testDropMacroNonExistent() throws Exception {
conf.setBoolVar(ConfVars.DROP_IGNORES_NON_EXISTENT, false);
analyze(parse("DROP TEMPORARY MACRO SOME_MACRO"));
}
+
@Test
public void testDropMacroNonExistentWithIfExists() throws Exception {
analyze(parse("DROP TEMPORARY MACRO IF EXISTS SOME_MACRO"));
}
+
@Test
public void testDropMacroNonExistentWithIfExistsDoNotIgnoreNonExistent() throws Exception {
conf.setBoolVar(ConfVars.DROP_IGNORES_NON_EXISTENT, false);
analyze(parse("DROP TEMPORARY MACRO IF EXISTS SOME_MACRO"));
}
+
@Test
public void testZeroInputParamters() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO FIXED_NUMBER() 1"));
}
+
@Test
public void testOneInputParamters() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))"));
}
+
@Test
public void testTwoInputParamters() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO DUMB_ADD (x INT, y INT) x + y"));
}
+
@Test
public void testThreeInputParamters() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO DUMB_ADD (x INT, y INT, z INT) x + y + z"));
}
+
@Test(expected = ParseException.class)
public void testCannotUseReservedWordAsName() throws Exception {
parse("CREATE TEMPORARY MACRO DOUBLE (x DOUBLE) 1.0 / (1.0 + EXP(-x))");
}
+
@Test(expected = ParseException.class)
public void testNoBody() throws Exception {
parse("CREATE TEMPORARY MACRO DUMB_MACRO()");
}
+
@Test(expected = SemanticException.class)
public void testUnknownInputParameter() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, y INT) x + y + z"));
}
+
@Test(expected = SemanticException.class)
public void testOneUnusedParameterName() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, y INT) x"));
}
+
@Test(expected = SemanticException.class)
public void testTwoUnusedParameterNames() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, y INT, z INT) x"));
}
+
@Test(expected = SemanticException.class)
public void testTwoDuplicateParameterNames() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, x INT) x + x"));
}
+
@Test(expected = SemanticException.class)
public void testThreeDuplicateParameters() throws Exception {
analyze(parse("CREATE TEMPORARY MACRO BAD_MACRO (x INT, x INT, x INT) x + x + x"));
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
index 1db5a00..4a828cd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestSemanticAnalyzerFactory.java
@@ -20,31 +20,32 @@ package org.apache.hadoop.hive.ql.parse;
import org.junit.Assert;
import org.antlr.runtime.CommonToken;
-import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.function.macro.create.CreateMacroAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.function.macro.drop.DropMacroAnalyzer;
import org.junit.Before;
import org.junit.Test;
public class TestSemanticAnalyzerFactory {
private QueryState queryState;
- private HiveConf conf;
-
+
@Before
public void setup() throws Exception {
queryState = new QueryState.Builder().build();
- conf = queryState.getConf();
}
+
@Test
public void testCreate() throws Exception {
BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory.
get(queryState, new ASTNode(new CommonToken(HiveParser.TOK_CREATEMACRO)));
- Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof MacroSemanticAnalyzer);
+ Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof CreateMacroAnalyzer);
}
+
@Test
public void testDrop() throws Exception {
BaseSemanticAnalyzer analyzer = SemanticAnalyzerFactory.
get(queryState, new ASTNode(new CommonToken(HiveParser.TOK_DROPMACRO)));
- Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof MacroSemanticAnalyzer);
+ Assert.assertTrue(analyzer.getClass().getSimpleName(), analyzer instanceof DropMacroAnalyzer);
}
}
\ No newline at end of file
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java
index 47849a1..0cf6a5b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java
@@ -21,7 +21,7 @@ import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
-import org.apache.hadoop.hive.ql.ddl.function.CreateMacroDesc;
+import org.apache.hadoop.hive.ql.ddl.function.macro.create.CreateMacroDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.junit.Before;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
index 5d553b4..eaf39a3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hive.ql.plan;
-import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
+import org.apache.hadoop.hive.ql.ddl.function.macro.drop.DropMacroDesc;
import org.junit.Assert;
import org.junit.Before;