Posted to commits@hive.apache.org by mg...@apache.org on 2019/10/01 07:33:44 UTC

[hive] branch master updated: HIVE-22218 Break up DDLSemanticAnalyzer - extract Workload Management related analyzers (Miklos Gergely reviewed by Jesus Camacho Rodriguez)

This is an automated email from the ASF dual-hosted git repository.

mgergely pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 3dc5926  HIVE-22218 Break up DDLSemanticAnalyzer - extract Workload Management related analyzers (Miklos Gergely reviewed by Jesus Camacho Rodriguez)
3dc5926 is described below

commit 3dc59264b30a8aea53d1d6f06e17f97b88444008
Author: miklosgergely <mg...@cloudera.com>
AuthorDate: Tue Sep 17 00:08:50 2019 +0200

    HIVE-22218 Break up DDLSemanticAnalyzer - extract Workload Management related analyzers (Miklos Gergely reviewed by Jesus Camacho Rodriguez)
---
 .../workloadmanagement/AlterResourcePlanDesc.java  |   87 -
 .../ddl/workloadmanagement/AlterWMMappingDesc.java |   44 -
 .../ql/ddl/workloadmanagement/AlterWMPoolDesc.java |   52 -
 .../ddl/workloadmanagement/AlterWMTriggerDesc.java |   45 -
 .../workloadmanagement/CreateWMMappingDesc.java    |   45 -
 .../workloadmanagement/CreateWMTriggerDesc.java    |   45 -
 .../hive/ql/ddl/workloadmanagement/WMUtils.java    |   19 +-
 .../mapping/AbstractVMMappingAnalyzer.java         |   62 +
 .../mapping/alter/AlterWMMappingAnalyzer.java      |   42 +
 .../mapping/alter/AlterWMMappingDesc.java          |   73 +
 .../alter}/AlterWMMappingOperation.java            |   11 +-
 .../alter/package-info.java}                       |   24 +-
 .../mapping/create/CreateWMMappingAnalyzer.java    |   42 +
 .../mapping/create/CreateWMMappingDesc.java        |   73 +
 .../create}/CreateWMMappingOperation.java          |   11 +-
 .../create/package-info.java}                      |   24 +-
 .../mapping/drop/DropWMMappingAnalyzer.java        |   56 +
 .../{ => mapping/drop}/DropWMMappingDesc.java      |   31 +-
 .../{ => mapping/drop}/DropWMMappingOperation.java |    6 +-
 .../drop/package-info.java}                        |   24 +-
 .../{WMUtils.java => mapping/package-info.java}    |   24 +-
 .../pool/alter/AlterWMPoolAnalyzer.java            |   91 +
 .../pool/alter/AlterWMPoolDesc.java                |   88 +
 .../{ => pool/alter}/AlterWMPoolOperation.java     |   20 +-
 .../{WMUtils.java => pool/alter/package-info.java} |   24 +-
 .../pool/create/CreateWMPoolAnalyzer.java          |   93 +
 .../pool/create/CreateWMPoolDesc.java              |   73 +
 .../{ => pool/create}/CreateWMPoolOperation.java   |   17 +-
 .../create/package-info.java}                      |   24 +-
 .../pool/drop/DropWMPoolAnalyzer.java              |   55 +
 .../{ => pool/drop}/DropWMPoolDesc.java            |   15 +-
 .../{ => pool/drop}/DropWMPoolOperation.java       |    4 +-
 .../{WMUtils.java => pool/drop/package-info.java}  |   24 +-
 .../AbstractAlterResourcePlanStatusOperation.java} |   65 +-
 .../disable/AlterResourcePlanDisableAnalyzer.java  |   53 +
 .../disable/AlterResourcePlanDisableDesc.java}     |   23 +-
 .../disable/AlterResourcePlanDisableOperation.java |   58 +
 .../alter/disable/package-info.java}               |   24 +-
 .../enable/AlterResourcePlanEnableAnalyzer.java    |   86 +
 .../alter/enable/AlterResourcePlanEnableDesc.java  |   68 +
 .../enable/AlterResourcePlanEnableOperation.java   |   56 +
 .../alter/enable/package-info.java}                |   24 +-
 .../alter/package-info.java}                       |   24 +-
 .../rename/AlterResourcePlanRenameAnalyzer.java    |   56 +
 .../alter/rename/AlterResourcePlanRenameDesc.java  |   51 +
 .../rename/AlterResourcePlanRenameOperation.java}  |   18 +-
 .../alter/rename/package-info.java}                |   24 +-
 .../replace/AlterResourcePlanReplaceAnalyzer.java  |   56 +
 .../replace/AlterResourcePlanReplaceDesc.java      |   51 +
 .../replace/AlterResourcePlanReplaceOperation.java |   59 +
 .../alter/replace/package-info.java}               |   24 +-
 .../alter/set/AlterResourcePlanSetAnalyzer.java    |   77 +
 .../alter/set/AlterResourcePlanSetDesc.java        |   58 +
 .../alter/set/AlterResourcePlanSetOperation.java   |   58 +
 .../alter/set/package-info.java}                   |   24 +-
 .../unset/AlterResourcePlanUnsetAnalyzer.java      |   78 +
 .../alter/unset/AlterResourcePlanUnsetDesc.java    |   60 +
 .../unset/AlterResourcePlanUnsetOperation.java     |   58 +
 .../alter/unset/package-info.java}                 |   24 +-
 .../AlterResourcePlanValidateAnalyzer.java         |   60 +
 .../validate/AlterResourcePlanValidateDesc.java}   |   34 +-
 .../AlterResourcePlanValidateOperation.java        |   53 +
 .../alter/validate/package-info.java}              |   24 +-
 .../create/CreateResourcePlanAnalyzer.java         |   83 +
 .../create}/CreateResourcePlanDesc.java            |   19 +-
 .../create}/CreateResourcePlanOperation.java       |    4 +-
 .../create/package-info.java}                      |   24 +-
 .../drop/DropResourcePlanAnalyzer.java             |   66 +
 .../drop}/DropResourcePlanDesc.java                |   18 +-
 .../drop}/DropResourcePlanOperation.java           |    4 +-
 .../drop/package-info.java}                        |   24 +-
 .../show/ShowResourcePlanAnalyzer.java             |   60 +
 .../show}/ShowResourcePlanDesc.java                |   16 +-
 .../show}/ShowResourcePlanOperation.java           |    8 +-
 .../show/package-info.java}                        |   24 +-
 .../workloadmanagement/trigger/TriggerUtils.java   |   73 +
 .../trigger/alter/AlterWMTriggerAnalyzer.java      |   58 +
 .../trigger/alter/AlterWMTriggerDesc.java          |   66 +
 .../alter}/AlterWMTriggerOperation.java            |   12 +-
 .../alter/package-info.java}                       |   24 +-
 .../trigger/create/CreateWMTriggerAnalyzer.java    |   58 +
 .../trigger/create/CreateWMTriggerDesc.java        |   66 +
 .../create}/CreateWMTriggerOperation.java          |   12 +-
 .../create/package-info.java}                      |   24 +-
 .../trigger/drop/DropWMTriggerAnalyzer.java        |   53 +
 .../{ => trigger/drop}/DropWMTriggerDesc.java      |   16 +-
 .../{ => trigger/drop}/DropWMTriggerOperation.java |    4 +-
 .../drop/package-info.java}                        |   24 +-
 .../{WMUtils.java => trigger/package-info.java}    |   24 +-
 .../pool/add/AlterPoolAddTriggerAnalyzer.java      |   58 +
 .../pool/add}/AlterPoolAddTriggerDesc.java         |   26 +-
 .../pool/add}/AlterPoolAddTriggerOperation.java    |   11 +-
 .../pool/add/package-info.java}                    |   24 +-
 .../pool/drop/AlterPoolDropTriggerAnalyzer.java    |   58 +
 .../pool/drop}/AlterPoolDropTriggerDesc.java       |   26 +-
 .../pool/drop}/AlterPoolDropTriggerOperation.java  |   11 +-
 .../pool/drop/package-info.java}                   |   24 +-
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java  |  513 --
 .../org/apache/hadoop/hive/ql/parse/HiveParser.g   |   11 +-
 .../hadoop/hive/ql/parse/ResourcePlanParser.g      |   39 +-
 .../hive/ql/parse/SemanticAnalyzerFactory.java     |   13 -
 .../apache/hadoop/hive/ql/plan/HiveOperation.java  |    7 +-
 ql/src/test/queries/clientpositive/resourceplan.q  |   19 +-
 .../results/clientpositive/llap/resourceplan.q.out |  243 +-
 .../test/results/clientpositive/resourceplan.q.out | 5440 --------------------
 105 files changed, 3093 insertions(+), 7090 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanDesc.java
deleted file mode 100644
index 2df4a42..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanDesc.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
-import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.plan.Explain;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-/**
- * DDL task description for ALTER RESOURCE PLAN commands.
- */
-@Explain(displayName = "Alter Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class AlterResourcePlanDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = -3514685833183437279L;
-
-  public static final String SCHEMA = "error#string";
-
-  private final WMNullableResourcePlan resourcePlan;
-  private final String planName;
-  private final boolean validate;
-  private final boolean isEnableActivate;
-  private final boolean isForceDeactivate;
-  private final boolean isReplace;
-  private final String resFile;
-
-  public AlterResourcePlanDesc(WMNullableResourcePlan resourcePlan, String planName, boolean validate,
-      boolean isEnableActivate, boolean isForceDeactivate, boolean isReplace, String resFile) {
-    this.resourcePlan = resourcePlan;
-    this.planName = planName;
-    this.validate = validate;
-    this.isEnableActivate = isEnableActivate;
-    this.isForceDeactivate = isForceDeactivate;
-    this.isReplace = isReplace;
-    this.resFile = resFile;
-  }
-
-  @Explain(displayName="Resource plan changed fields", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMNullableResourcePlan getResourcePlan() {
-    return resourcePlan;
-  }
-
-  @Explain(displayName="Resource plan to modify", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPlanName() {
-    return planName;
-  }
-
-  @Explain(displayName="shouldValidate", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public boolean shouldValidate() {
-    return validate;
-  }
-
-  public boolean isEnableActivate() {
-    return isEnableActivate;
-  }
-
-  public boolean isForceDeactivate() {
-    return isForceDeactivate;
-  }
-
-  public boolean isReplace() {
-    return isReplace;
-  }
-
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
-  public String getResFile() {
-    return resFile;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingDesc.java
deleted file mode 100644
index 9f259da..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingDesc.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.hive.metastore.api.WMMapping;
-import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.plan.Explain;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-/**
- * DDL task description for ALTER ... MAPPING commands.
- */
-@Explain(displayName = "Alter Mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class AlterWMMappingDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = -442968568922083053L;
-
-  private final WMMapping mapping;
-
-  public AlterWMMappingDesc(WMMapping mapping) {
-    this.mapping = mapping;
-  }
-
-  @Explain(displayName = "mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMMapping getMapping() {
-    return mapping;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolDesc.java
deleted file mode 100644
index 20f14ab..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolDesc.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.hive.metastore.api.WMNullablePool;
-import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.plan.Explain;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-/**
- * DDL task description for ALTER POOL commands.
- */
-@Explain(displayName = "Alter Pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class AlterWMPoolDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = 4872940135771213510L;
-
-  private final WMNullablePool pool;
-  private final String poolPath;
-
-  public AlterWMPoolDesc(WMNullablePool pool, String poolPath) {
-    this.pool = pool;
-    this.poolPath = poolPath;
-  }
-
-  @Explain(displayName="pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMNullablePool getPool() {
-    return pool;
-  }
-
-  @Explain(displayName="poolPath", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPoolPath() {
-    return poolPath;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerDesc.java
deleted file mode 100644
index 9bc2516..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerDesc.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.plan.Explain;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-/**
- * DDL task description for ALTER TRIGGER commands.
- */
-@Explain(displayName="Alter WM Trigger", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class AlterWMTriggerDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = -2105736261687539210L;
-
-  private final WMTrigger trigger;
-
-  public AlterWMTriggerDesc(WMTrigger trigger) {
-    this.trigger = trigger;
-  }
-
-  @Explain(displayName="trigger", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMTrigger getTrigger() {
-    return trigger;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingDesc.java
deleted file mode 100644
index 16e8906..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingDesc.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.hive.metastore.api.WMMapping;
-import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.plan.Explain;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-/**
- * DDL task description for CREATE ... MAPPING commands.
- */
-@Explain(displayName = "Create Mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class CreateWMMappingDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = -442968568922083053L;
-
-  private final WMMapping mapping;
-
-  public CreateWMMappingDesc(WMMapping mapping) {
-    this.mapping = mapping;
-  }
-
-  @Explain(displayName = "mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMMapping getMapping() {
-    return mapping;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerDesc.java
deleted file mode 100644
index 48ca2f5..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerDesc.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.ddl.DDLDesc;
-import org.apache.hadoop.hive.ql.plan.Explain;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-/**
- * DDL task description for CREATE TRIGGER commands.
- */
-@Explain(displayName="Create WM Trigger", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class CreateWMTriggerDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = 1705317739121300923L;
-
-  private final WMTrigger trigger;
-
-  public CreateWMTriggerDesc(WMTrigger trigger) {
-    this.trigger = trigger;
-  }
-
-  @Explain(displayName="trigger", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMTrigger getTrigger() {
-    return trigger;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
index 4860ee7..ea40f77 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
@@ -18,23 +18,24 @@
 
 package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
 
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 
 /**
  * Common utilities for Workload Management related ddl operations.
  */
-final class WMUtils {
+public final class WMUtils {
   private WMUtils() {
     throw new UnsupportedOperationException("WMUtils should not be instantiated");
   }
 
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
+  public static String poolPath(Tree root) {
+    StringBuilder builder = new StringBuilder();
+    builder.append(BaseSemanticAnalyzer.unescapeIdentifier(root.getText()));
+    for (int i = 0; i < root.getChildCount(); ++i) {
+      // DOT is not affected
+      builder.append(BaseSemanticAnalyzer.unescapeIdentifier(root.getChild(i).getText()));
     }
+    return builder.toString();
   }
 }
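
For context, the relocated WMUtils.poolPath flattens a parsed pool path subtree (identifier tokens interleaved with DOT tokens) back into a dotted string; unescapeIdentifier strips backquotes from each identifier, while the "." tokens pass through it unchanged, which is what the "DOT is not affected" comment notes. A minimal self-contained sketch of the same flattening over plain token strings (a hypothetical stand-in for the ANTLR tree, for illustration only):

// Sketch only: models the token texts of a pool path subtree as plain strings
// instead of an org.antlr.runtime.tree.Tree. Hypothetical, not Hive code.
public final class PoolPathSketch {
  // Mirrors what BaseSemanticAnalyzer.unescapeIdentifier does to each token:
  // strip one surrounding pair of backquotes, leave everything else (including ".") alone.
  private static String unescape(String s) {
    if (s != null && s.length() > 1 && s.charAt(0) == '`' && s.charAt(s.length() - 1) == '`') {
      return s.substring(1, s.length() - 1);
    }
    return s;
  }

  static String poolPath(String rootText, String... childTexts) {
    StringBuilder builder = new StringBuilder(unescape(rootText));
    for (String child : childTexts) {
      builder.append(unescape(child));
    }
    return builder.toString();
  }

  public static void main(String[] args) {
    System.out.println(poolPath("`default`", ".", "etl")); // prints: default.etl
  }
}
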
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/AbstractVMMappingAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/AbstractVMMappingAnalyzer.java
new file mode 100644
index 0000000..75deade
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/AbstractVMMappingAnalyzer.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.WMUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+
+/**
+ * Abstract ancestor of Create and Alter WM Mapping analyzers.
+ */
+public abstract class AbstractVMMappingAnalyzer extends BaseSemanticAnalyzer {
+  public AbstractVMMappingAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() < 4 || root.getChildCount() > 5) {
+      throw new SemanticException("Invalid syntax for create or alter mapping.");
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String entityType = root.getChild(1).getText();
+    String entityName = PlanUtils.stripQuotes(root.getChild(2).getText());
+    String poolPath = root.getChild(3).getType() == HiveParser.TOK_UNMANAGED ?
+        null : WMUtils.poolPath(root.getChild(3)); // Null path => unmanaged
+    Integer ordering = root.getChildCount() == 5 ? Integer.valueOf(root.getChild(4).getText()) : null;
+
+    DDLDesc desc = getDesc(resourcePlanName, entityType, entityName, poolPath, ordering);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+
+  protected abstract DDLDesc getDesc(String resourcePlanName, String entityType, String entityName, String poolPath,
+      Integer ordering);
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingAnalyzer.java
new file mode 100644
index 0000000..0a2c784
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingAnalyzer.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.alter;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.AbstractVMMappingAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for alter mapping commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_MAPPING)
+public class AlterWMMappingAnalyzer extends AbstractVMMappingAnalyzer {
+  public AlterWMMappingAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  protected DDLDesc getDesc(String resourcePlanName, String entityType, String entityName, String poolPath,
+      Integer ordering) {
+    return new AlterWMMappingDesc(resourcePlanName, entityType, entityName, poolPath, ordering);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingDesc.java
new file mode 100644
index 0000000..6378ddb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingDesc.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.alter;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER ... MAPPING commands.
+ */
+@Explain(displayName = "Alter Mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterWMMappingDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String resourcePlanName;
+  private final String entityType;
+  private final String entityName;
+  private final String poolPath;
+  private final Integer ordering;
+
+  public AlterWMMappingDesc(String resourcePlanName, String entityType, String entityName, String poolPath,
+      Integer ordering) {
+    this.resourcePlanName = resourcePlanName;
+    this.entityType = entityType;
+    this.entityName = entityName;
+    this.poolPath = poolPath;
+    this.ordering = ordering;
+  }
+
+  @Explain(displayName = "Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName = "Entity type", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getEntityType() {
+    return entityType;
+  }
+
+  @Explain(displayName = "Entity name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getEntityName() {
+    return entityName;
+  }
+
+  @Explain(displayName = "Pool path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getPoolPath() {
+    return poolPath;
+  }
+
+  @Explain(displayName = "Ordering", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public Integer getOrdering() {
+    return ordering;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingOperation.java
similarity index 75%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingOperation.java
index 513e0c9..8aad92d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/AlterWMMappingOperation.java
@@ -16,10 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.alter;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.metastore.api.WMMapping;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -34,7 +35,13 @@ public class AlterWMMappingOperation extends DDLOperation<AlterWMMappingDesc> {
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().createOrUpdateWMMapping(desc.getMapping(), true);
+    WMMapping mapping = new WMMapping(desc.getResourcePlanName(), desc.getEntityType(), desc.getEntityName());
+    mapping.setPoolPath(desc.getPoolPath());
+    if (desc.getOrdering() != null) {
+      mapping.setOrdering(desc.getOrdering());
+    }
+
+    context.getDb().createOrUpdateWMMapping(mapping, true);
 
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/package-info.java
index 4860ee7..bee2b39 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/alter/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Alter WM Mapping DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.alter;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingAnalyzer.java
new file mode 100644
index 0000000..065f020
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingAnalyzer.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.create;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.AbstractVMMappingAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for create mapping commands.
+ */
+@DDLType(type=HiveParser.TOK_CREATE_MAPPING)
+public class CreateWMMappingAnalyzer extends AbstractVMMappingAnalyzer {
+  public CreateWMMappingAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  protected DDLDesc getDesc(String resourcePlanName, String entityType, String entityName, String poolPath,
+      Integer ordering) {
+    return new CreateWMMappingDesc(resourcePlanName, entityType, entityName, poolPath, ordering);
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingDesc.java
new file mode 100644
index 0000000..e629de0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingDesc.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.create;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for CREATE ... MAPPING commands.
+ */
+@Explain(displayName = "Create Mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class CreateWMMappingDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String resourcePlanName;
+  private final String entityType;
+  private final String entityName;
+  private final String poolPath;
+  private final Integer ordering;
+
+  public CreateWMMappingDesc(String resourcePlanName, String entityType, String entityName, String poolPath,
+      Integer ordering) {
+    this.resourcePlanName = resourcePlanName;
+    this.entityType = entityType;
+    this.entityName = entityName;
+    this.poolPath = poolPath;
+    this.ordering = ordering;
+  }
+
+  @Explain(displayName = "Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName = "Entity type", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getEntityType() {
+    return entityType;
+  }
+
+  @Explain(displayName = "Entity name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getEntityName() {
+    return entityName;
+  }
+
+  @Explain(displayName = "Pool path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getPoolPath() {
+    return poolPath;
+  }
+
+  @Explain(displayName = "Ordering", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public Integer getOrdering() {
+    return ordering;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingOperation.java
similarity index 75%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingOperation.java
index b0c16e6..bfd6425 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMMappingOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/CreateWMMappingOperation.java
@@ -16,10 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.create;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.metastore.api.WMMapping;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -34,7 +35,13 @@ public class CreateWMMappingOperation extends DDLOperation<CreateWMMappingDesc>
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().createOrUpdateWMMapping(desc.getMapping(), false);
+    WMMapping mapping = new WMMapping(desc.getResourcePlanName(), desc.getEntityType(), desc.getEntityName());
+    mapping.setPoolPath(desc.getPoolPath());
+    if (desc.getOrdering() != null) {
+      mapping.setOrdering(desc.getOrdering());
+    }
+
+    context.getDb().createOrUpdateWMMapping(mapping, false);
 
     return 0;
   }
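
To make the syntax-to-desc mapping concrete: for a statement along the lines of CREATE USER MAPPING 'bob' IN plan1 TO default.etl WITH ORDER 1 (the exact form is defined in ResourcePlanParser.g), the shared analyzer hands getDesc the pieces shown below, and the operation above then rebuilds a thrift WMMapping from those plain fields, keeping metastore API types out of the desc. A hand-written sketch with made-up values, not generated output:

import org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.create.CreateWMMappingDesc;

class CreateMappingDescSketch {
  static CreateWMMappingDesc example() {
    // What AbstractVMMappingAnalyzer would extract from the AST of the
    // hypothetical statement above, one argument per child node.
    return new CreateWMMappingDesc(
        "plan1",       // resource plan name (child 0)
        "USER",        // entity type token text (child 1)
        "bob",         // entity name with the quotes stripped (child 2)
        "default.etl", // pool path; null here would mean UNMANAGED (child 3)
        1);            // optional ordering (child 4)
  }
}
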
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/package-info.java
index 4860ee7..12c69f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/create/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Create WM Mapping DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.create;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingAnalyzer.java
new file mode 100644
index 0000000..6baf8e1
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingAnalyzer.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+
+/**
+ * Analyzer for drop mapping commands.
+ */
+@DDLType(type=HiveParser.TOK_DROP_MAPPING)
+public class DropWMMappingAnalyzer extends BaseSemanticAnalyzer {
+  public DropWMMappingAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode ast) throws SemanticException {
+    if (ast.getChildCount() != 3) {
+      throw new SemanticException("Invalid syntax for drop mapping.");
+    }
+
+    String resourcePlanName = unescapeIdentifier(ast.getChild(0).getText());
+    String entityType = ast.getChild(1).getText();
+    String entityName = PlanUtils.stripQuotes(ast.getChild(2).getText());
+
+    DropWMMappingDesc desc = new DropWMMappingDesc(resourcePlanName, entityType, entityName);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingDesc.java
similarity index 56%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingDesc.java
index 56a6852..f27c549 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingDesc.java
@@ -16,11 +16,10 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.drop;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.metastore.api.WMMapping;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
@@ -30,16 +29,30 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
  */
 @Explain(displayName = "Drop mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class DropWMMappingDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = -1567558687529244218L;
+  private static final long serialVersionUID = 1L;
 
-  private final WMMapping mapping;
+  private final String resourcePlanName;
+  private final String entityType;
+  private final String entityName;
 
-  public DropWMMappingDesc(WMMapping mapping) {
-    this.mapping = mapping;
+  public DropWMMappingDesc(String resourcePlanName, String entityType, String entityName) {
+    this.resourcePlanName = resourcePlanName;
+    this.entityType = entityType;
+    this.entityName = entityName;
   }
 
-  @Explain(displayName = "mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMMapping getMapping() {
-    return mapping;
+  @Explain(displayName = "Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName = "Entity type", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getEntityType() {
+    return entityType;
+  }
+
+  @Explain(displayName = "Entity name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getEntityName() {
+    return entityName;
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingOperation.java
similarity index 82%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingOperation.java
index 508ec48..7a5e072 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMMappingOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/DropWMMappingOperation.java
@@ -16,10 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.drop;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.metastore.api.WMMapping;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -34,7 +35,8 @@ public class DropWMMappingOperation extends DDLOperation<DropWMMappingDesc> {
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().dropWMMapping(desc.getMapping());
+    WMMapping mapping = new WMMapping(desc.getResourcePlanName(), desc.getEntityType(), desc.getEntityName());
+    context.getDb().dropWMMapping(mapping);
 
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/package-info.java
index 4860ee7..c10903e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/drop/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Drop WM Mapping DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping.drop;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/package-info.java
index 4860ee7..2d85cef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/mapping/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** WM Mapping DDL operations. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.mapping;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolAnalyzer.java
new file mode 100644
index 0000000..051136a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolAnalyzer.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.alter;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.WMUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+
+/**
+ * Analyzer for alter pool commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_POOL)
+public class AlterWMPoolAnalyzer extends BaseSemanticAnalyzer {
+  public AlterWMPoolAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() < 3) {
+      throw new SemanticException("Invalid syntax for alter pool: " + root.toStringTree());
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String poolPath = WMUtils.poolPath(root.getChild(1));
+    Double allocFraction = null;
+    Integer queryParallelism = null;
+    String schedulingPolicy = null;
+    boolean removeSchedulingPolicy = false;
+    String newPath = null;
+
+    for (int i = 2; i < root.getChildCount(); ++i) {
+      Tree child = root.getChild(i);
+      if (child.getChildCount() != 1) {
+        throw new SemanticException("Invalid syntax in alter pool expected parameter.");
+      }
+      Tree param = child.getChild(0);
+      switch (child.getType()) {
+      case HiveParser.TOK_ALLOC_FRACTION:
+        allocFraction = Double.parseDouble(param.getText());
+        break;
+      case HiveParser.TOK_QUERY_PARALLELISM:
+        queryParallelism = Integer.parseInt(param.getText());
+        break;
+      case HiveParser.TOK_SCHEDULING_POLICY:
+        if (param.getType() != HiveParser.TOK_NULL) {
+          schedulingPolicy = PlanUtils.stripQuotes(param.getText());
+        } else {
+          removeSchedulingPolicy = true;
+        }
+        break;
+      case HiveParser.TOK_PATH:
+        newPath = WMUtils.poolPath(param);
+        break;
+      default:
+        throw new SemanticException("Incorrect alter syntax: " + child.toStringTree());
+      }
+    }
+
+    AlterWMPoolDesc desc = new AlterWMPoolDesc(resourcePlanName, poolPath, allocFraction, queryParallelism,
+        schedulingPolicy, removeSchedulingPolicy, newPath);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
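
For orientation, a minimal sketch (not part of the patch) of the descriptor the analyzer above builds. The statement in the comment is an assumed example of the dotted pool-path syntax; only the clauses actually present in the statement end up non-null in the desc, which is what lets the operation distinguish "not mentioned" from "set":

    // Assumed statement:
    //   ALTER POOL plan1.default.etl SET ALLOC_FRACTION = 0.3, QUERY_PARALLELISM = 5;
    AlterWMPoolDesc desc = new AlterWMPoolDesc(
        "plan1",        // resource plan name (child 0)
        "default.etl",  // pool path (child 1)
        0.3,            // TOK_ALLOC_FRACTION
        5,              // TOK_QUERY_PARALLELISM
        null,           // scheduling policy not mentioned
        false,          // ... and not explicitly removed either
        null);          // no new path requested
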
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolDesc.java
new file mode 100644
index 0000000..e3c769b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolDesc.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.alter;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER POOL commands.
+ */
+@Explain(displayName = "Alter Pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterWMPoolDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 4872940135771213510L;
+
+  private final String resourcePlanName;
+  private final String poolPath;
+  private final Double allocFraction;
+  private final Integer queryParallelism;
+  private final String schedulingPolicy;
+  private final boolean removeSchedulingPolicy;
+  private final String newPath;
+
+  public AlterWMPoolDesc(String resourcePlanName, String poolPath, Double allocFraction, Integer queryParallelism,
+      String schedulingPolicy, boolean removeSchedulingPolicy, String newPath) {
+    this.resourcePlanName = resourcePlanName;
+    this.poolPath = poolPath;
+    this.allocFraction = allocFraction;
+    this.queryParallelism = queryParallelism;
+    this.schedulingPolicy = schedulingPolicy;
+    this.removeSchedulingPolicy = removeSchedulingPolicy;
+    this.newPath = newPath;
+  }
+
+  @Explain(displayName = "Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName = "Pool path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getPoolPath() {
+    return poolPath;
+  }
+
+  @Explain(displayName = "Alloc fraction", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public Double getAllocFraction() {
+    return allocFraction;
+  }
+
+  @Explain(displayName = "Query parallelism", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public Integer getQueryParallelism() {
+    return queryParallelism;
+  }
+
+  @Explain(displayName = "Scheduling policy", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getSchedulingPolicy() {
+    return schedulingPolicy;
+  }
+
+  @Explain(displayName = "Remove scheduling policy", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED },
+      displayOnlyOnTrue=true)
+  public boolean isRemoveSchedulingPolicy() {
+    return removeSchedulingPolicy;
+  }
+
+  @Explain(displayName = "New path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getNewPath() {
+    return newPath;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolOperation.java
similarity index 63%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolOperation.java
index 059d407..3ba8944 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMPoolOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/AlterWMPoolOperation.java
@@ -16,10 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.alter;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.metastore.api.WMNullablePool;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -34,7 +35,22 @@ public class AlterWMPoolOperation extends DDLOperation<AlterWMPoolDesc> {
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().alterWMPool(desc.getPool(),  desc.getPoolPath());
+    WMNullablePool pool = new WMNullablePool(desc.getResourcePlanName(), desc.getPoolPath());
+    if (desc.getAllocFraction() != null) {
+      pool.setAllocFraction(desc.getAllocFraction());
+    }
+    if (desc.getQueryParallelism() != null) {
+      pool.setQueryParallelism(desc.getQueryParallelism());
+    }
+    if (desc.getSchedulingPolicy() != null || desc.isRemoveSchedulingPolicy()) {
+      pool.setIsSetSchedulingPolicy(true);
+      pool.setSchedulingPolicy(desc.getSchedulingPolicy());
+    }
+    if (desc.getNewPath() != null) {
+      pool.setPoolPath(desc.getNewPath());
+    }
+
+    context.getDb().alterWMPool(pool, desc.getPoolPath());
 
     return 0;
   }
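
The nullable thrift object is what carries the "leave unchanged" versus "explicitly clear" distinction to the metastore. A minimal sketch of the three scheduling-policy cases the execute() above covers, using the same setters (values assumed for illustration):

    WMNullablePool pool = new WMNullablePool("plan1", "default.etl");
    // Case 1: SET SCHEDULING_POLICY = 'fair' - mark the field set, store the value.
    pool.setIsSetSchedulingPolicy(true);
    pool.setSchedulingPolicy("fair");
    // Case 2: SCHEDULING_POLICY = NULL - mark the field set with a null value,
    // so the metastore clears the stored policy (the removeSchedulingPolicy branch).
    pool.setIsSetSchedulingPolicy(true);
    pool.setSchedulingPolicy(null);
    // Case 3: clause absent - call neither setter; the existing policy is kept.
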
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/package-info.java
index 4860ee7..723774b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/alter/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Alter Pool DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.alter;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolAnalyzer.java
new file mode 100644
index 0000000..0246be0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolAnalyzer.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.create;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.WMUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+
+/**
+ * Analyzer for create pool commands.
+ */
+@DDLType(type=HiveParser.TOK_CREATE_POOL)
+public class CreateWMPoolAnalyzer extends BaseSemanticAnalyzer {
+  public CreateWMPoolAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    // TODO: allow defaults for e.g. scheduling policy.
+    if (root.getChildCount() < 3) {
+      throw new SemanticException("Expected more arguments: " + root.toStringTree());
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String poolPath = WMUtils.poolPath(root.getChild(1));
+    Double allocFraction = null;
+    Integer queryParallelism = null;
+    String schedulingPolicy = null;
+
+    for (int i = 2; i < root.getChildCount(); ++i) {
+      Tree child = root.getChild(i);
+      if (child.getChildCount() != 1) {
+        throw new SemanticException("Expected 1 parameter for: " + child.getText());
+      }
+
+      String param = child.getChild(0).getText();
+      switch (child.getType()) {
+      case HiveParser.TOK_ALLOC_FRACTION:
+        allocFraction = Double.parseDouble(param);
+        break;
+      case HiveParser.TOK_QUERY_PARALLELISM:
+        queryParallelism = Integer.parseInt(param);
+        break;
+      case HiveParser.TOK_SCHEDULING_POLICY:
+        schedulingPolicy = PlanUtils.stripQuotes(param);
+        break;
+      case HiveParser.TOK_PATH:
+        throw new SemanticException("Invalid parameter path in create pool");
+      default:
+        throw new SemanticException("Invalid parameter " + child.getText() + " in create pool");
+      }
+    }
+
+    if (allocFraction == null) {
+      throw new SemanticException("alloc_fraction should be specified for a pool");
+    }
+    if (queryParallelism == null) {
+      throw new SemanticException("query_parallelism should be specified for a pool");
+    }
+
+    CreateWMPoolDesc desc = new CreateWMPoolDesc(resourcePlanName, poolPath, allocFraction, queryParallelism,
+        schedulingPolicy);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
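
A minimal sketch (not part of the patch) of the resulting descriptor; the statement is an assumed example. Since both numeric parameters are mandatory here, the desc can use primitive double/int instead of the nullable wrappers the alter variant needs:

    // Assumed statement:
    //   CREATE POOL plan1.default.etl
    //     WITH ALLOC_FRACTION = 0.3, QUERY_PARALLELISM = 5, SCHEDULING_POLICY = 'fair';
    CreateWMPoolDesc desc = new CreateWMPoolDesc("plan1", "default.etl", 0.3, 5, "fair");
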
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolDesc.java
new file mode 100644
index 0000000..cfc81a5
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolDesc.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.create;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for CREATE POOL commands.
+ */
+@Explain(displayName = "Create Pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class CreateWMPoolDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 4872940135771213510L;
+
+  private final String resourcePlanName;
+  private final String poolPath;
+  private final double allocFraction;
+  private final int queryParallelism;
+  private final String schedulingPolicy;
+
+  public CreateWMPoolDesc(String resourcePlanName, String poolPath, double allocFraction, int queryParallelism,
+      String schedulingPolicy) {
+    this.resourcePlanName = resourcePlanName;
+    this.poolPath = poolPath;
+    this.allocFraction = allocFraction;
+    this.queryParallelism = queryParallelism;
+    this.schedulingPolicy = schedulingPolicy;
+  }
+
+  @Explain(displayName = "Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName = "Pool path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getPoolPath() {
+    return poolPath;
+  }
+
+  @Explain(displayName = "Alloc fraction", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public double getAllocFraction() {
+    return allocFraction;
+  }
+
+  @Explain(displayName = "Query parallelism", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public int getQueryParallelism() {
+    return queryParallelism;
+  }
+
+  @Explain(displayName = "Scheduling policy", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getSchedulingPolicy() {
+    return schedulingPolicy;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolOperation.java
similarity index 63%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolOperation.java
index 5298535..c523e79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/CreateWMPoolOperation.java
@@ -16,13 +16,16 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.create;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.metastore.api.WMPool;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * Operation process of creating a workload management pool.
@@ -34,7 +37,17 @@ public class CreateWMPoolOperation extends DDLOperation<CreateWMPoolDesc> {
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().createWMPool(desc.getPool());
+    WMPool pool = new WMPool(desc.getResourcePlanName(), desc.getPoolPath());
+    pool.setAllocFraction(desc.getAllocFraction());
+    pool.setQueryParallelism(desc.getQueryParallelism());
+    if (desc.getSchedulingPolicy() != null) {
+      if (!MetaStoreUtils.isValidSchedulingPolicy(desc.getSchedulingPolicy())) {
+        throw new SemanticException("Invalid scheduling policy " + desc.getSchedulingPolicy());
+      }
+      pool.setSchedulingPolicy(desc.getSchedulingPolicy());
+    }
+
+    context.getDb().createWMPool(pool);
 
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/package-info.java
index 4860ee7..5447711 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/create/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Create Pool DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.create;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolAnalyzer.java
new file mode 100644
index 0000000..49cf48f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolAnalyzer.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.WMUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for drop pool commands.
+ */
+@DDLType(type=HiveParser.TOK_DROP_POOL)
+public class DropWMPoolAnalyzer extends BaseSemanticAnalyzer {
+  public DropWMPoolAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() != 2) {
+      throw new SemanticException("Invalid syntax for drop pool.");
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String poolPath = WMUtils.poolPath(root.getChild(1));
+
+    DropWMPoolDesc desc = new DropWMPoolDesc(resourcePlanName, poolPath);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
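
The mapping here is direct; a one-line sketch with an assumed statement:

    // Assumed statement: DROP POOL plan1.default.etl;
    DropWMPoolDesc desc = new DropWMPoolDesc("plan1", "default.etl");
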
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolDesc.java
similarity index 73%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolDesc.java
index 755c957..2eef340 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.drop;
 
 import java.io.Serializable;
 
@@ -31,19 +31,20 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class DropWMPoolDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -2608462103392563252L;
 
-  private final String planName;
+  private final String resourcePlanName;
   private final String poolPath;
 
-  public DropWMPoolDesc(String planName, String poolPath) {
-    this.planName = planName;
+  public DropWMPoolDesc(String resourcePlanName, String poolPath) {
+    this.resourcePlanName = resourcePlanName;
     this.poolPath = poolPath;
   }
 
-  @Explain(displayName="poolName", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPlanName() {
-    return planName;
+  @Explain(displayName="Resource plan name", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
   }
 
+  @Explain(displayName="Pool path", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getPoolPath() {
     return poolPath;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolOperation.java
similarity index 90%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolOperation.java
index 44564c3..0939e79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMPoolOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/DropWMPoolOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.drop;
 
 import java.io.IOException;
 
@@ -34,7 +34,7 @@ public class DropWMPoolOperation extends DDLOperation<DropWMPoolDesc> {
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().dropWMPool(desc.getPlanName(), desc.getPoolPath());
+    context.getDb().dropWMPool(desc.getResourcePlanName(), desc.getPoolPath());
 
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/package-info.java
index 4860ee7..ca51de3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/pool/drop/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Drop Pool DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.pool.drop;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/AbstractAlterResourcePlanStatusOperation.java
similarity index 60%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/AbstractAlterResourcePlanStatusOperation.java
index fe9e7e1..eca1840 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterResourcePlanOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/AbstractAlterResourcePlanStatusOperation.java
@@ -16,86 +16,59 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter;
 
-import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
-import org.apache.hadoop.hive.ql.ddl.DDLUtils;
-import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
-import org.apache.hadoop.hive.ql.exec.tez.WorkloadManager;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
 import java.util.Collection;
 import java.util.concurrent.ExecutionException;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
-import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
-import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
+import org.apache.hadoop.hive.ql.exec.tez.WorkloadManager;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 import com.google.common.util.concurrent.ListenableFuture;
 
 /**
- * Operation process of altering a resource plan.
+ * Abstract ancestor of the enable / disable Resource Plan operations.
  */
-public class AlterResourcePlanOperation extends DDLOperation<AlterResourcePlanDesc> {
-  // Note: the resource plan operations are going to be annotated with namespace based on the config
-  //       inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
-  //       that there exist other namespaces, because one HS2 always operates inside just one and we
-  //       don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
-  //       namespaces - Hive.java will transparently scope everything. That's the idea anyway.
-  public AlterResourcePlanOperation(DDLOperationContext context, AlterResourcePlanDesc desc) {
+public abstract class AbstractAlterResourcePlanStatusOperation<T extends DDLDesc> extends DDLOperation<T> {
+
+  public AbstractAlterResourcePlanStatusOperation(DDLOperationContext context, T desc) {
     super(context, desc);
   }
 
-  @Override
-  public int execute() throws HiveException, IOException {
-    if (desc.shouldValidate()) {
-      WMValidateResourcePlanResponse result = context.getDb().validateResourcePlan(desc.getPlanName());
-      try (DataOutputStream out = DDLUtils.getOutputStream(new Path(desc.getResFile()), context)) {
-        context.getFormatter().showErrors(out, result);
-      } catch (IOException e) {
-        throw new HiveException(e);
-      }
-      return 0;
-    }
-
-    WMNullableResourcePlan resourcePlan = desc.getResourcePlan();
-    WMFullResourcePlan appliedResourcePlan = context.getDb().alterResourcePlan(desc.getPlanName(), resourcePlan,
-        desc.isEnableActivate(), desc.isForceDeactivate(), desc.isReplace());
-
-    boolean isActivate = resourcePlan.getStatus() != null && resourcePlan.getStatus() == WMResourcePlanStatus.ACTIVE;
-    boolean mustHaveAppliedChange = isActivate || desc.isForceDeactivate();
-    if (!mustHaveAppliedChange && !desc.isReplace()) {
-      return 0; // The modification cannot affect an active plan.
+  protected void handleWMServiceChangeIfNeeded(WMFullResourcePlan appliedResourcePlan, boolean isActivate,
+      boolean isForceDeactivate, boolean replace) throws HiveException {
+    boolean mustHaveAppliedChange = isActivate || isForceDeactivate;
+    if (!mustHaveAppliedChange && !replace) {
+      return; // The modification cannot affect an active plan.
     }
     if (appliedResourcePlan == null && !mustHaveAppliedChange) {
-      return 0; // Replacing an inactive plan.
+      return; // Replacing an inactive plan.
     }
 
     WorkloadManager wm = WorkloadManager.getInstance();
     boolean isInTest = HiveConf.getBoolVar(context.getConf(), ConfVars.HIVE_IN_TEST);
     if (wm == null && isInTest) {
-      return 0; // Skip for tests if WM is not present.
+      return; // Skip for tests if WM is not present.
     }
 
-    if ((appliedResourcePlan == null) != desc.isForceDeactivate()) {
+    if ((appliedResourcePlan == null) != isForceDeactivate) {
       throw new HiveException("Cannot get a resource plan to apply; or non-null plan on disable");
       // TODO: shut down HS2?
     }
     assert appliedResourcePlan == null || appliedResourcePlan.getPlan().getStatus() == WMResourcePlanStatus.ACTIVE;
 
-    handleWorkloadManagementServiceChange(wm, isActivate, appliedResourcePlan);
-
-    return 0;
+    handleWMServiceChange(wm, isActivate, appliedResourcePlan);
   }
 
-  private int handleWorkloadManagementServiceChange(WorkloadManager wm, boolean isActivate,
+  private int handleWMServiceChange(WorkloadManager wm, boolean isActivate,
       WMFullResourcePlan appliedResourcePlan) throws HiveException {
     String name = null;
     if (isActivate) {
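
A minimal sketch of how a concrete subclass is expected to drive the shared helper, assuming a desc type with a getResourcePlanName() accessor; the three boolean arguments tell the helper whether the change must have reached the active plan:

    @Override
    public int execute() throws HiveException, IOException {
      WMNullableResourcePlan plan = new WMNullableResourcePlan();
      plan.setStatus(WMResourcePlanStatus.ENABLED);
      WMFullResourcePlan applied = context.getDb().alterResourcePlan(
          desc.getResourcePlanName(), plan, false, false, false);
      // isActivate / isForceDeactivate / replace all false: a plain ENABLE
      // cannot affect the active plan, so the helper returns early.
      handleWMServiceChangeIfNeeded(applied, false, false, false);
      return 0;
    }
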
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableAnalyzer.java
new file mode 100644
index 0000000..b355eee
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableAnalyzer.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.disable;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for disable resource plan commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_RP_DISABLE)
+public class AlterResourcePlanDisableAnalyzer extends BaseSemanticAnalyzer {
+  public AlterResourcePlanDisableAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() > 1) {
+      throw new SemanticException("Incorrect syntax: " + root.toStringTree());
+    }
+
+    String resourcePlanName = root.getChildCount() == 0 ? null : unescapeIdentifier(root.getChild(0).getText());
+
+    AlterResourcePlanDisableDesc desc = new AlterResourcePlanDisableDesc(resourcePlanName);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
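
The same token covers both the named and the plan-less form; a sketch of the two descriptors the branch above can produce (statement shapes assumed from the desc Javadoc below):

    // ALTER RESOURCE PLAN plan1 DISABLE;  -> one child, disable that plan.
    AlterResourcePlanDisableDesc named = new AlterResourcePlanDisableDesc("plan1");
    // Plan-less form -> no children, null name; the operation below
    // interprets this as a force-deactivate of workload management.
    AlterResourcePlanDisableDesc wmOff = new AlterResourcePlanDisableDesc(null);
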
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableDesc.java
similarity index 56%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableDesc.java
index e43beee..e00237a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMPoolDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableDesc.java
@@ -16,30 +16,29 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.disable;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.metastore.api.WMPool;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DDL task description for CREATE POOL commands.
+ * DDL task description for ALTER RESOURCE PLAN ... DISABLE or DISABLE WORKLOAD MANAGEMENT commands.
  */
-@Explain(displayName = "Create Pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class CreateWMPoolDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = 4872940135771213510L;
+@Explain(displayName = "Disable Resource plan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterResourcePlanDisableDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = -3514685833183437279L;
 
-  private final WMPool pool;
+  private final String resourcePlanName;
 
-  public CreateWMPoolDesc(WMPool pool) {
-    this.pool = pool;
+  public AlterResourcePlanDisableDesc(String resourcePlanName) {
+    this.resourcePlanName = resourcePlanName;
   }
 
-  @Explain(displayName="pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMPool getPool() {
-    return pool;
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableOperation.java
new file mode 100644
index 0000000..0af8983
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/AlterResourcePlanDisableOperation.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.disable;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.AbstractAlterResourcePlanStatusOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of disabling a resource plan.
+ */
+public class AlterResourcePlanDisableOperation
+    extends AbstractAlterResourcePlanStatusOperation<AlterResourcePlanDisableDesc> {
+  // Note: the resource plan operations are going to be annotated with namespace based on the config
+  //       inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
+  //       that there exist other namespaces, because one HS2 always operates inside just one and we
+  //       don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
+  //       namespaces - Hive.java will transparently scope everything. That's the idea anyway.
+  public AlterResourcePlanDisableOperation(DDLOperationContext context, AlterResourcePlanDisableDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    boolean forceDeactivate = desc.getResourcePlanName() == null;
+
+    WMNullableResourcePlan resourcePlan = new WMNullableResourcePlan();
+    resourcePlan.setStatus(forceDeactivate ? WMResourcePlanStatus.ENABLED : WMResourcePlanStatus.DISABLED);
+
+    WMFullResourcePlan appliedResourcePlan = context.getDb().alterResourcePlan(desc.getResourcePlanName(), resourcePlan,
+        false, forceDeactivate, false);
+
+    handleWMServiceChangeIfNeeded(appliedResourcePlan, false, forceDeactivate, false);
+
+    return 0;
+  }
+}
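
Note the asymmetry the ternary above encodes: disabling a named plan moves it to DISABLED, while the plan-less force-deactivate moves the currently active plan back to ENABLED, i.e. workload management is switched off but the plan itself stays usable. A compact sketch:

    WMNullableResourcePlan rp = new WMNullableResourcePlan();
    boolean forceDeactivate = true; // i.e. desc.getResourcePlanName() == null
    rp.setStatus(forceDeactivate
        ? WMResourcePlanStatus.ENABLED     // active plan falls back to ENABLED
        : WMResourcePlanStatus.DISABLED);  // a named plan is disabled
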
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/package-info.java
index 4860ee7..4397c98 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/disable/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Disable Resource Plan DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.disable;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableAnalyzer.java
new file mode 100644
index 0000000..29a46cb
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableAnalyzer.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.enable;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for enable resource plan commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_RP_ENABLE)
+public class AlterResourcePlanEnableAnalyzer extends BaseSemanticAnalyzer {
+  public AlterResourcePlanEnableAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() == 0) {
+      console.printError("Activate a resource plan to enable workload management!");
+      return;
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+
+    boolean enable = false;
+    boolean activate = false;
+    boolean replace = false;
+
+    for (int i = 1; i < root.getChildCount(); ++i) {
+      Tree child = root.getChild(i);
+      switch (child.getType()) {
+      case HiveParser.TOK_ACTIVATE:
+        activate = true;
+        if (child.getChildCount() > 1) {
+          throw new SemanticException("Expected 0 or 1 arguments: " + root.toStringTree());
+        } else if (child.getChildCount() == 1) {
+          if (child.getChild(0).getType() != HiveParser.TOK_REPLACE) {
+            throw new SemanticException("Incorrect syntax: " + root.toStringTree());
+          }
+          replace = true;
+        }
+        break;
+      case HiveParser.TOK_ENABLE:
+        enable = true;
+        break;
+      case HiveParser.TOK_REPLACE:
+        replace = true;
+        break;
+      default:
+        throw new SemanticException("Unexpected token in alter resource plan statement: " + child.getType());
+      }
+    }
+
+    AlterResourcePlanEnableDesc desc = new AlterResourcePlanEnableDesc(resourcePlanName, enable, activate, replace);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
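
For orientation, the token-to-flag mapping the loop above implements, plus the descriptor an assumed full-form statement would yield:

    // TOK_ENABLE                            -> enable   = true
    // TOK_ACTIVATE                          -> activate = true
    // TOK_ACTIVATE with a TOK_REPLACE child,
    // or a bare TOK_REPLACE                 -> replace  = true
    // An assumed ENABLE + ACTIVATE + REPLACE statement on plan1 gives:
    AlterResourcePlanEnableDesc desc =
        new AlterResourcePlanEnableDesc("plan1", true, true, true);
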
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableDesc.java
new file mode 100644
index 0000000..22c8b86
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableDesc.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.enable;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER RESOURCE PLAN ... ENABLE or ENABLE WORKLOAD MANAGEMENT commands.
+ */
+@Explain(displayName = "Enable Resource plan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterResourcePlanEnableDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String resourcePlanName;
+  private final boolean enable;
+  private final boolean activate;
+  private final boolean replace;
+
+  public AlterResourcePlanEnableDesc(String resourcePlanName, boolean enable, boolean activate, boolean replace) {
+    this.resourcePlanName = resourcePlanName;
+    this.enable = enable;
+    this.activate = activate;
+    this.replace = replace;
+  }
+
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName="Enable", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED },
+      displayOnlyOnTrue=true)
+  public boolean isEnable() {
+    return enable;
+  }
+
+  @Explain(displayName="Activate", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED },
+      displayOnlyOnTrue=true)
+  public boolean isActivate() {
+    return activate;
+  }
+
+  @Explain(displayName="Replace", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED },
+      displayOnlyOnTrue=true)
+  public boolean isReplace() {
+    return replace;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableOperation.java
new file mode 100644
index 0000000..1b04977
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/AlterResourcePlanEnableOperation.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.enable;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.AbstractAlterResourcePlanStatusOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of enabling a resource plan.
+ */
+public class AlterResourcePlanEnableOperation
+    extends AbstractAlterResourcePlanStatusOperation<AlterResourcePlanEnableDesc> {
+  // Note: the resource plan operations are going to be annotated with namespace based on the config
+  //       inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
+  //       that there exist other namespaces, because one HS2 always operates inside just one and we
+  //       don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
+  //       namespaces - Hive.java will transparently scope everything. That's the idea anyway.
+  public AlterResourcePlanEnableOperation(DDLOperationContext context, AlterResourcePlanEnableDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    WMNullableResourcePlan resourcePlan = new WMNullableResourcePlan();
+    resourcePlan.setStatus(desc.isActivate() ? WMResourcePlanStatus.ACTIVE : WMResourcePlanStatus.ENABLED);
+
+    boolean canActivateDisabled = desc.isEnable() && desc.isActivate() && !desc.isReplace();
+    WMFullResourcePlan appliedResourcePlan = context.getDb().alterResourcePlan(desc.getResourcePlanName(), resourcePlan,
+        canActivateDisabled, false, desc.isReplace());
+
+    handleWMServiceChangeIfNeeded(appliedResourcePlan, desc.isActivate(), false, desc.isReplace());
+    return 0;
+  }
+}
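
The canActivateDisabled flag covers the one-step ENABLE ACTIVATE case: without it the metastore could reject activating a plan that is still disabled. A sketch of the values for that case, assuming desc = (enable=true, activate=true, replace=false):

    WMNullableResourcePlan rp = new WMNullableResourcePlan();
    rp.setStatus(WMResourcePlanStatus.ACTIVE); // activate wins over plain enable
    boolean canActivateDisabled = true;        // enable && activate && !replace
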
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/package-info.java
index 4860ee7..6631ad7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/enable/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Enable Resource Plan DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.enable;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/package-info.java
index 4860ee7..5bb2848 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Alter Resource Plan DDL operations. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameAnalyzer.java
new file mode 100644
index 0000000..c746325
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameAnalyzer.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.rename;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for alter resource plan rename commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_RP_RENAME)
+public class AlterResourcePlanRenameAnalyzer extends BaseSemanticAnalyzer {
+  public AlterResourcePlanRenameAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() != 2) {
+      throw new SemanticException("Expected two arguments");
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String newResourcePlanName = unescapeIdentifier(root.getChild(1).getText());
+
+    AlterResourcePlanRenameDesc desc = new AlterResourcePlanRenameDesc(resourcePlanName, newResourcePlanName);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameDesc.java
new file mode 100644
index 0000000..c2793d0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameDesc.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.rename;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER RESOURCE PLAN ... RENAME ... commands.
+ */
+@Explain(displayName = "Rename Resource plan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterResourcePlanRenameDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String resourcePlanName;
+  private final String newResourcePlanName;
+
+  public AlterResourcePlanRenameDesc(String resourcePlanName, String newResourcePlanName) {
+    this.resourcePlanName = resourcePlanName;
+    this.newResourcePlanName = newResourcePlanName;
+  }
+
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName="New resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getNewResourcePlanName() {
+    return newResourcePlanName;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameOperation.java
similarity index 50%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameOperation.java
index 513e0c9..2a47aad 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMMappingOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/AlterResourcePlanRenameOperation.java
@@ -16,26 +16,34 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.rename;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
- * Operation process of altering a workload management mapping.
+ * Operation process of altering a resource plan.
  */
-public class AlterWMMappingOperation extends DDLOperation<AlterWMMappingDesc> {
-  public AlterWMMappingOperation(DDLOperationContext context, AlterWMMappingDesc desc) {
+public class AlterResourcePlanRenameOperation extends DDLOperation<AlterResourcePlanRenameDesc> {
+  // Note: the resource plan operations are going to be annotated with namespace based on the config
+  //       inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
+  //       that there exist other namespaces, because one HS2 always operates inside just one and we
+  //       don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
+  //       namespaces - Hive.java will transparently scope everything. That's the idea anyway.
+  public AlterResourcePlanRenameOperation(DDLOperationContext context, AlterResourcePlanRenameDesc desc) {
     super(context, desc);
   }
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().createOrUpdateWMMapping(desc.getMapping(), true);
+    WMNullableResourcePlan resourcePlan = new WMNullableResourcePlan();
+    resourcePlan.setName(desc.getNewResourcePlanName());
 
+    context.getDb().alterResourcePlan(desc.getResourcePlanName(), resourcePlan, false, false, false);
     return 0;
   }
 }
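
For reference, the rename operation above backs DDL of roughly this shape
(plan names are placeholders; this is a sketch of the intended syntax, not
a grammar reference):

    ALTER RESOURCE PLAN old_plan RENAME TO new_plan;
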
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/package-info.java
index 4860ee7..2aeb183 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/rename/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Alter Resource Plan Rename DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.rename;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceAnalyzer.java
new file mode 100644
index 0000000..b39c688
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceAnalyzer.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.replace;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for replace resource plan commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_RP_REPLACE)
+public class AlterResourcePlanReplaceAnalyzer extends BaseSemanticAnalyzer {
+  public AlterResourcePlanReplaceAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() < 1 || root.getChildCount() > 2) {
+      throw new SemanticException("Incorrect syntax");
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String destinationResourcePlan = root.getChildCount() == 2 ? unescapeIdentifier(root.getChild(1).getText()) : null;
+
+    AlterResourcePlanReplaceDesc desc = new AlterResourcePlanReplaceDesc(resourcePlanName, destinationResourcePlan);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceDesc.java
new file mode 100644
index 0000000..5b8db93
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceDesc.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.replace;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for REPLACE [ACTIVE] RESOURCE PLAN ... commands.
+ */
+@Explain(displayName = "Replace Resource plan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterResourcePlanReplaceDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = -3514685833183437279L;
+
+  private final String resourcePlanName;
+  private final String destinationResourcePlanName;
+
+  public AlterResourcePlanReplaceDesc(String resourcePlanName, String destinationResourcePlanName) {
+    this.resourcePlanName = resourcePlanName;
+    this.destinationResourcePlanName = destinationResourcePlanName;
+  }
+
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName="Destination Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getDestinationResourcePlanName() {
+    return destinationResourcePlanName;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceOperation.java
new file mode 100644
index 0000000..8f82b55
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/AlterResourcePlanReplaceOperation.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.replace;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.AbstractAlterResourcePlanStatusOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of replacing a resource plan.
+ */
+public class AlterResourcePlanReplaceOperation
+    extends AbstractAlterResourcePlanStatusOperation<AlterResourcePlanReplaceDesc> {
+  // Note: the resource plan operations are going to be annotated with namespace based on the config
+  //       inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
+  //       that there exist other namespaces, because one HS2 always operates inside just one and we
+  //       don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
+  //       namespaces - Hive.java will transparently scope everything. That's the idea anyway.
+  public AlterResourcePlanReplaceOperation(DDLOperationContext context, AlterResourcePlanReplaceDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    WMNullableResourcePlan resourcePlan = new WMNullableResourcePlan();
+    if (desc.getDestinationResourcePlanName() == null) {
+      resourcePlan.setStatus(WMResourcePlanStatus.ACTIVE);
+    } else {
+      resourcePlan.setName(desc.getDestinationResourcePlanName());
+    }
+
+    WMFullResourcePlan appliedResourcePlan = context.getDb().alterResourcePlan(desc.getResourcePlanName(), resourcePlan,
+        false, false, true);
+
+    handleWMServiceChangeIfNeeded(appliedResourcePlan, false, false, true);
+    return 0;
+  }
+}
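
A usage sketch for the replace operation above, with placeholder plan names:
the one-argument form makes the named plan the active one, while the
two-argument form, as execute() suggests, renames the first plan onto the
second, replacing it:

    REPLACE ACTIVE RESOURCE PLAN WITH candidate_plan;
    REPLACE RESOURCE PLAN candidate_plan WITH target_plan;
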
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/package-info.java
index 4860ee7..40607f8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/replace/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Replace Resource Plan DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.replace;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetAnalyzer.java
new file mode 100644
index 0000000..a9025e0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetAnalyzer.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.set;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.WMUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for alter resource plan set commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_RP_SET)
+public class AlterResourcePlanSetAnalyzer extends BaseSemanticAnalyzer {
+  public AlterResourcePlanSetAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+
+    Integer queryParallelism = null;
+    String defaultPool = null;
+    for (int i = 1; i < root.getChildCount(); ++i) {
+      Tree child = root.getChild(i);
+      switch (child.getType()) {
+      case HiveParser.TOK_QUERY_PARALLELISM:
+        if (child.getChildCount() != 1) {
+          throw new SemanticException("Expected one argument");
+        }
+
+        queryParallelism = Integer.parseInt(child.getChild(0).getText());
+        break;
+      case HiveParser.TOK_DEFAULT_POOL:
+        if (child.getChildCount() != 1) {
+          throw new SemanticException("Expected one argument");
+        }
+
+        defaultPool = WMUtils.poolPath(child.getChild(0));
+        break;
+      default:
+        throw new SemanticException("Unexpected token in alter resource plan statement: " + child.getType());
+      }
+    }
+
+    AlterResourcePlanSetDesc desc = new AlterResourcePlanSetDesc(resourcePlanName, queryParallelism, defaultPool);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetDesc.java
new file mode 100644
index 0000000..7aa716b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetDesc.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.set;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER RESOURCE PLAN ... SET ... commands.
+ */
+@Explain(displayName = "Alter Resource plan Set", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterResourcePlanSetDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = -3514685833183437279L;
+
+  private final String resourcePlanName;
+  private final Integer queryParallelism;
+  private final String defaultPool;
+
+  public AlterResourcePlanSetDesc(String resourcePlanName, Integer queryParallelism, String defaultPool) {
+    this.resourcePlanName = resourcePlanName;
+    this.queryParallelism = queryParallelism;
+    this.defaultPool = defaultPool;
+  }
+
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName="Query parallelism", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public Integer getQueryParallelism() {
+    return queryParallelism;
+  }
+
+  @Explain(displayName="Default pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getDefaultPool() {
+    return defaultPool;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetOperation.java
new file mode 100644
index 0000000..58af58e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/AlterResourcePlanSetOperation.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.set;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of setting properties of a resource plan.
+ */
+public class AlterResourcePlanSetOperation extends DDLOperation<AlterResourcePlanSetDesc> {
+  // Note: the resource plan operations are going to be annotated with namespace based on the config
+  //       inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
+  //       that there exist other namespaces, because one HS2 always operates inside just one and we
+  //       don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
+  //       namespaces - Hive.java will transparently scope everything. That's the idea anyway.
+  public AlterResourcePlanSetOperation(DDLOperationContext context, AlterResourcePlanSetDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    WMNullableResourcePlan resourcePlan = new WMNullableResourcePlan();
+
+    if (desc.getQueryParallelism() != null) {
+      resourcePlan.setIsSetQueryParallelism(true);
+      resourcePlan.setQueryParallelism(desc.getQueryParallelism());
+    }
+
+    if (desc.getDefaultPool() != null) {
+      resourcePlan.setIsSetDefaultPoolPath(true);
+      resourcePlan.setDefaultPoolPath(desc.getDefaultPool());
+    }
+
+    context.getDb().alterResourcePlan(desc.getResourcePlanName(), resourcePlan, false, false, false);
+    return 0;
+  }
+}
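
The set analyzer and operation above translate to statements along these
lines (plan name, value, and pool path are placeholders; QUERY_PARALLELISM
and DEFAULT POOL correspond to TOK_QUERY_PARALLELISM and TOK_DEFAULT_POOL):

    ALTER RESOURCE PLAN etl_plan SET QUERY_PARALLELISM = 4;
    ALTER RESOURCE PLAN etl_plan SET DEFAULT POOL = etl.heavy;
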
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/package-info.java
index 4860ee7..f1fdebc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/set/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Alter Resource Plan Set DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.set;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetAnalyzer.java
new file mode 100644
index 0000000..8bb6039
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetAnalyzer.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.unset;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for alter resource plan unset commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_RP_UNSET)
+public class AlterResourcePlanUnsetAnalyzer extends BaseSemanticAnalyzer {
+  public AlterResourcePlanUnsetAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+
+    boolean unsetQueryParallelism = false;
+    boolean unsetDefaultPool = false;
+    for (int i = 1; i < root.getChildCount(); ++i) {
+      Tree child = root.getChild(i);
+      switch (child.getType()) {
+      case HiveParser.TOK_QUERY_PARALLELISM:
+        if (child.getChildCount() != 0) {
+          throw new SemanticException("Expected zero argument");
+        }
+
+        unsetQueryParallelism = true;
+        break;
+      case HiveParser.TOK_DEFAULT_POOL:
+        if (child.getChildCount() != 0) {
+          throw new SemanticException("Expected zero argument");
+        }
+
+        unsetDefaultPool = true;
+        break;
+      default:
+        throw new SemanticException(
+          "Unexpected token in alter resource plan statement: " + child.getType());
+      }
+    }
+
+    AlterResourcePlanUnsetDesc desc = new AlterResourcePlanUnsetDesc(resourcePlanName, unsetQueryParallelism,
+        unsetDefaultPool);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetDesc.java
new file mode 100644
index 0000000..83a9e70
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetDesc.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.unset;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER RESOURCE PLAN ... UNSET ... commands.
+ */
+@Explain(displayName = "Alter Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterResourcePlanUnsetDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = -3514685833183437279L;
+
+  private final String resourcePlanName;
+  private final boolean unsetQueryParallelism;
+  private final boolean unsetDefaultPool;
+
+  public AlterResourcePlanUnsetDesc(String resourcePlanName, boolean unsetQueryParallelism, boolean unsetDefaultPool) {
+    this.resourcePlanName = resourcePlanName;
+    this.unsetQueryParallelism = unsetQueryParallelism;
+    this.unsetDefaultPool = unsetDefaultPool;
+  }
+
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName="Unset Query parallelism", displayOnlyOnTrue=true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isUnsetQueryParallelism() {
+    return unsetQueryParallelism;
+  }
+
+  @Explain(displayName="Unset Default Pool", displayOnlyOnTrue=true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isUnsetDefaultPool() {
+    return unsetDefaultPool;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetOperation.java
new file mode 100644
index 0000000..04a63d3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/AlterResourcePlanUnsetOperation.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.unset;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of unsetting properties of a resource plan.
+ */
+public class AlterResourcePlanUnsetOperation extends DDLOperation<AlterResourcePlanUnsetDesc> {
+  // Note: the resource plan operations are going to be annotated with namespace based on the config
+  //       inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
+  //       that there exist other namespaces, because one HS2 always operates inside just one and we
+  //       don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
+  //       namespaces - Hive.java will transparently scope everything. That's the idea anyway.
+  public AlterResourcePlanUnsetOperation(DDLOperationContext context, AlterResourcePlanUnsetDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    WMNullableResourcePlan resourcePlan = new WMNullableResourcePlan();
+
+    if (desc.isUnsetQueryParallelism()) {
+      resourcePlan.setIsSetQueryParallelism(true);
+      resourcePlan.unsetQueryParallelism();
+    }
+
+    if (desc.isUnsetDefaultPool()) {
+      resourcePlan.setIsSetDefaultPoolPath(true);
+      resourcePlan.unsetDefaultPoolPath();
+    }
+
+    context.getDb().alterResourcePlan(desc.getResourcePlanName(), resourcePlan, false, false, false);
+    return 0;
+  }
+}
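
The unset counterpart clears the same two properties; roughly, with a
placeholder plan name:

    ALTER RESOURCE PLAN etl_plan UNSET QUERY_PARALLELISM;
    ALTER RESOURCE PLAN etl_plan UNSET DEFAULT POOL;
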
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/package-info.java
index 4860ee7..754f46a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/unset/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Alter Resource Plan Unset DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.unset;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateAnalyzer.java
new file mode 100644
index 0000000..fcabf08
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateAnalyzer.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.validate;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for alter resource plan validate commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_RP_VALIDATE)
+public class AlterResourcePlanValidateAnalyzer extends BaseSemanticAnalyzer {
+  public AlterResourcePlanValidateAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() != 1) {
+      throw new SemanticException("Incorrect syntax");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+
+    AlterResourcePlanValidateDesc desc = new AlterResourcePlanValidateDesc(resourcePlanName, ctx.getResFile());
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(AlterResourcePlanValidateDesc.SCHEMA));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateDesc.java
similarity index 52%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateDesc.java
index e2cdcb5..cbcf80e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateDesc.java
@@ -16,43 +16,39 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.validate;
 
 import java.io.Serializable;
 
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DDL task description for SHOW RESOURCE PLAN(S) commands.
+ * DDL task description for ALTER RESOURCE PLAN ... VALIDATE commands.
  */
-@Explain(displayName = "Show Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-public class ShowResourcePlanDesc implements DDLDesc, Serializable {
-  private static final long serialVersionUID = 6076076933035978545L;
+@Explain(displayName = "Validate Resource Plan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterResourcePlanValidateDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
 
-  private static final String ALL_SCHEMA = "rp_name,status,query_parallelism#string,string,int";
-  private static final String SINGLE_SCHEMA = "line#string";
+  public static final String SCHEMA = "error#string";
 
-  private final String planName;
-  private final String resFile;
+  private final String resourcePlanName;
+  private final Path resFile;
 
-  public ShowResourcePlanDesc(String planName, String resFile) {
-    this.planName = planName;
+  public AlterResourcePlanValidateDesc(String resourcePlanName, Path resFile) {
+    this.resourcePlanName = resourcePlanName;
     this.resFile = resFile;
   }
 
-  @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getResourcePlanName() {
-    return planName;
+    return resourcePlanName;
   }
 
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
-  public String getResFile() {
+  @Explain(displayName = "Result file", explainLevels = { Level.EXTENDED })
+  public Path getResFile() {
     return resFile;
   }
-
-  public String getSchema() {
-    return (planName == null) ? ALL_SCHEMA : SINGLE_SCHEMA;
-  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateOperation.java
new file mode 100644
index 0000000..bda4fe9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/AlterResourcePlanValidateOperation.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.validate;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of validating a resource plan.
+ */
+public class AlterResourcePlanValidateOperation extends DDLOperation<AlterResourcePlanValidateDesc> {
+  // Note: the resource plan operations are going to be annotated with namespace based on the config
+  //       inside Hive.java. We don't want HS2 to be aware of namespaces beyond that, or to even see
+  //       that there exist other namespaces, because one HS2 always operates inside just one and we
+  //       don't want this complexity to bleed everywhere. Therefore, this code doesn't care about
+  //       namespaces - Hive.java will transparently scope everything. That's the idea anyway.
+  public AlterResourcePlanValidateOperation(DDLOperationContext context, AlterResourcePlanValidateDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException, IOException {
+    WMValidateResourcePlanResponse result = context.getDb().validateResourcePlan(desc.getResourcePlanName());
+    try (DataOutputStream out = DDLUtils.getOutputStream(desc.getResFile(), context)) {
+      context.getFormatter().showErrors(out, result);
+    } catch (IOException e) {
+      throw new HiveException(e);
+    }
+    return 0;
+  }
+}
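
Validation results come back as a fetchable single-column result set (the
"error#string" schema above); the triggering statement is sketched below
with a placeholder plan name:

    ALTER RESOURCE PLAN etl_plan VALIDATE;
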
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/package-info.java
index 4860ee7..f998608 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/alter/validate/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Alter Resource Plan Validate DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.alter.validate;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanAnalyzer.java
new file mode 100644
index 0000000..a275a19
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanAnalyzer.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.create;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for create resource plan commands.
+ */
+@DDLType(type=HiveParser.TOK_CREATE_RP)
+public class CreateResourcePlanAnalyzer extends BaseSemanticAnalyzer {
+  public CreateResourcePlanAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() == 0) {
+      throw new SemanticException("Expected name in CREATE RESOURCE PLAN statement");
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    Integer queryParallelism = null;
+    String likeName = null;
+    boolean ifNotExists = false;
+
+    for (int i = 1; i < root.getChildCount(); ++i) {
+      Tree child = root.getChild(i);
+      switch (child.getType()) {
+      case HiveParser.TOK_QUERY_PARALLELISM:
+        // Note: later we may be able to set multiple things together (except LIKE).
+        if (queryParallelism == null && likeName == null) {
+          queryParallelism = Integer.parseInt(child.getChild(0).getText());
+        } else {
+          throw new SemanticException("Conflicting create arguments " + root.toStringTree());
+        }
+        break;
+      case HiveParser.TOK_LIKERP:
+        if (queryParallelism == null && likeName == null) {
+          likeName = unescapeIdentifier(child.getChild(0).getText());
+        } else {
+          throw new SemanticException("Conflicting create arguments " + root.toStringTree());
+        }
+        break;
+      case HiveParser.TOK_IFNOTEXISTS:
+        ifNotExists = true;
+        break;
+      default:
+        throw new SemanticException("Invalid create arguments " + root.toStringTree());
+      }
+    }
+
+    CreateResourcePlanDesc desc = new CreateResourcePlanDesc(resourcePlanName, queryParallelism, likeName, ifNotExists);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
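
The create analyzer above accepts an optional IF NOT EXISTS and either a
query parallelism or a LIKE source, but not both; sketched with placeholder
names:

    CREATE RESOURCE PLAN etl_plan WITH QUERY_PARALLELISM = 4;
    CREATE RESOURCE PLAN backup_plan LIKE etl_plan;
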
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanDesc.java
similarity index 73%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanDesc.java
index 17d5083..181b1a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.create;
 
 import java.io.Serializable;
 
@@ -27,28 +27,29 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 /**
  * DDL task description for CREATE RESOURCE PLAN commands.
  */
-@Explain(displayName = "Create ResourcePlan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+@Explain(displayName = "Create Resource plan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateResourcePlanDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = -3492803425541479414L;
 
-  private final String planName;
+  private final String resourcePlanName;
   private final Integer queryParallelism;
   private final String copyFromName;
   private final boolean ifNotExists;
 
-  public CreateResourcePlanDesc(String planName, Integer queryParallelism, String copyFromName, boolean ifNotExists) {
-    this.planName = planName;
+  public CreateResourcePlanDesc(String resourcePlanName, Integer queryParallelism, String copyFromName,
+      boolean ifNotExists) {
+    this.resourcePlanName = resourcePlanName;
     this.queryParallelism = queryParallelism;
     this.copyFromName = copyFromName;
     this.ifNotExists = ifNotExists;
   }
 
-  @Explain(displayName="planName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPlanName() {
-    return planName;
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
   }
 
-  @Explain(displayName="queryParallelism", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  @Explain(displayName="Query parallelism", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public Integer getQueryParallelism() {
     return queryParallelism;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanOperation.java
similarity index 91%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanOperation.java
index 95ee6da..f75935b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateResourcePlanOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/CreateResourcePlanOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.create;
 
 import java.io.IOException;
 
@@ -35,7 +35,7 @@ public class CreateResourcePlanOperation extends DDLOperation<CreateResourcePlan
 
   @Override
   public int execute() throws HiveException, IOException {
-    WMResourcePlan plan = new WMResourcePlan(desc.getPlanName());
+    WMResourcePlan plan = new WMResourcePlan(desc.getResourcePlanName());
     if (desc.getQueryParallelism() != null) {
       plan.setQueryParallelism(desc.getQueryParallelism());
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/package-info.java
index 4860ee7..3b4ab0e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/create/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Create Resource Plan DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.create;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanAnalyzer.java
new file mode 100644
index 0000000..8af7930
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanAnalyzer.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.drop;
+
+import org.antlr.runtime.tree.Tree;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for drop resource plan commands.
+ */
+@DDLType(type=HiveParser.TOK_DROP_RP)
+public class DropResourcePlanAnalyzer extends BaseSemanticAnalyzer {
+  public DropResourcePlanAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() == 0) {
+      throw new SemanticException("Expected name in DROP RESOURCE PLAN statement");
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    boolean ifExists = false;
+
+    for (int i = 1; i < root.getChildCount(); ++i) {
+      Tree child = root.getChild(i);
+      switch (child.getType()) {
+      case HiveParser.TOK_IFEXISTS:
+        ifExists = true;
+        break;
+      default:
+        throw new SemanticException("Invalid create arguments " + root.toStringTree());
+      }
+    }
+
+    DropResourcePlanDesc desc = new DropResourcePlanDesc(resourcePlanName, ifExists);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
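
The corresponding statement (hypothetical plan name, assuming the documented Hive
syntax):

    DROP RESOURCE PLAN IF EXISTS daily_plan;
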
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanDesc.java
similarity index 68%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanDesc.java
index ef7c723..af23166 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.drop;
 
 import java.io.Serializable;
 
@@ -27,24 +27,24 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 /**
  * DDL task description for DROP RESOURCE PLAN commands.
  */
-@Explain(displayName = "Drop Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+@Explain(displayName = "Drop Resource plan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class DropResourcePlanDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1258596919510047766L;
 
-  private final String planName;
+  private final String resourcePlanName;
   private final boolean ifExists;
 
-  public DropResourcePlanDesc(String planName, boolean ifExists) {
-    this.planName = planName;
+  public DropResourcePlanDesc(String resourcePlanName, boolean ifExists) {
+    this.resourcePlanName = resourcePlanName;
     this.ifExists = ifExists;
   }
 
-  @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPlanName() {
-    return planName;
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
   }
 
-  @Explain(displayName="ifExists", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED },
+  @Explain(displayName="If exists", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED },
       displayOnlyOnTrue = true)
   public boolean getIfExists() {
     return ifExists;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanOperation.java
similarity index 89%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanOperation.java
index 964e989..b29d495 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropResourcePlanOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/DropResourcePlanOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.drop;
 
 import java.io.IOException;
 
@@ -34,7 +34,7 @@ public class DropResourcePlanOperation extends DDLOperation<DropResourcePlanDesc
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().dropResourcePlan(desc.getPlanName(), desc.getIfExists());
+    context.getDb().dropResourcePlan(desc.getResourcePlanName(), desc.getIfExists());
 
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/package-info.java
index 4860ee7..4890548 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/drop/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Drop Resource Plan DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.drop;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanAnalyzer.java
new file mode 100644
index 0000000..440e575
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanAnalyzer.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.show;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for show resource plan commands.
+ */
+@DDLType(type=HiveParser.TOK_SHOW_RP)
+public class ShowResourcePlanAnalyzer extends BaseSemanticAnalyzer {
+  public ShowResourcePlanAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() > 1) {
+      throw new SemanticException("Invalid syntax for SHOW RESOURCE PLAN statement");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String resourcePlanName = (root.getChildCount() == 0) ? null : unescapeIdentifier(root.getChild(0).getText());
+
+    ShowResourcePlanDesc desc = new ShowResourcePlanDesc(resourcePlanName, ctx.getResFile().toString());
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(desc.getSchema()));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
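
Both forms below are handled here (hypothetical plan name); with no plan name the task
lists all plans using the tabular schema, otherwise it prints the full plan as text
lines (see ShowResourcePlanDesc.getSchema in the next hunk):

    SHOW RESOURCE PLANS;
    SHOW RESOURCE PLAN daily_plan;
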
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanDesc.java
similarity index 76%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanDesc.java
index e2cdcb5..187be6c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.show;
 
 import java.io.Serializable;
 
@@ -34,25 +34,25 @@ public class ShowResourcePlanDesc implements DDLDesc, Serializable {
   private static final String ALL_SCHEMA = "rp_name,status,query_parallelism#string,string,int";
   private static final String SINGLE_SCHEMA = "line#string";
 
-  private final String planName;
+  private final String resourcePlanName;
   private final String resFile;
 
-  public ShowResourcePlanDesc(String planName, String resFile) {
-    this.planName = planName;
+  public ShowResourcePlanDesc(String resourcePlanName, String resFile) {
+    this.resourcePlanName = resourcePlanName;
     this.resFile = resFile;
   }
 
-  @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  @Explain(displayName="Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getResourcePlanName() {
-    return planName;
+    return resourcePlanName;
   }
 
-  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
+  @Explain(displayName = "Result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }
 
   public String getSchema() {
-    return (planName == null) ? ALL_SCHEMA : SINGLE_SCHEMA;
+    return (resourcePlanName == null) ? ALL_SCHEMA : SINGLE_SCHEMA;
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanOperation.java
similarity index 89%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanOperation.java
index aa586fc..8a5739b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/ShowResourcePlanOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/ShowResourcePlanOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.show;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
@@ -39,9 +39,9 @@ public class ShowResourcePlanOperation extends DDLOperation<ShowResourcePlanDesc
   public int execute() throws HiveException, IOException {
     // TODO: Enhance showResourcePlan to display all the pools, triggers and mappings.
     try (DataOutputStream out = DDLUtils.getOutputStream(new Path(desc.getResFile()), context)) {
-      String planName = desc.getResourcePlanName();
-      if (planName != null) {
-        context.getFormatter().showFullResourcePlan(out, context.getDb().getResourcePlan(planName));
+      String resourcePlanName = desc.getResourcePlanName();
+      if (resourcePlanName != null) {
+        context.getFormatter().showFullResourcePlan(out, context.getDb().getResourcePlan(resourcePlanName));
       } else {
         context.getFormatter().showResourcePlans(out, context.getDb().getAllResourcePlans());
       }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/package-info.java
index 4860ee7..80918f1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/resourceplan/show/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Show Resource Plan DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.resourceplan.show;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/TriggerUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/TriggerUtils.java
new file mode 100644
index 0000000..27848b9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/TriggerUtils.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger;
+
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.WMUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
+
+/**
+ * Common utilities for Trigger related ddl operations.
+ */
+public final class TriggerUtils {
+  private TriggerUtils() {
+    throw new UnsupportedOperationException("TriggerUtils should not be instantiated");
+  }
+
+  public static String buildTriggerExpression(ASTNode node) throws SemanticException {
+    if (node.getType() != HiveParser.TOK_TRIGGER_EXPRESSION || node.getChildCount() == 0) {
+      throw new SemanticException("Invalid trigger expression.");
+    }
+
+    StringBuilder builder = new StringBuilder();
+    for (int i = 0; i < node.getChildCount(); ++i) {
+      builder.append(node.getChild(i).getText()); // Don't strip quotes.
+      builder.append(' ');
+    }
+    builder.deleteCharAt(builder.length() - 1);
+    return builder.toString();
+  }
+
+  public static String buildTriggerActionExpression(ASTNode node) throws SemanticException {
+    switch (node.getType()) {
+    case HiveParser.KW_KILL:
+      return "KILL";
+    case HiveParser.KW_MOVE:
+      if (node.getChildCount() != 1) {
+        throw new SemanticException("Invalid move to clause in trigger action.");
+      }
+      String poolPath = WMUtils.poolPath(node.getChild(0));
+      return "MOVE TO " + poolPath;
+    default:
+      throw new SemanticException("Unknown token in action clause: " + node.getType());
+    }
+  }
+
+  public static void validateTrigger(WMTrigger trigger) throws HiveException {
+    try {
+      ExecutionTrigger.fromWMTrigger(trigger);
+    } catch (IllegalArgumentException e) {
+      throw new HiveException(e);
+    }
+  }
+}
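
As a sketch of what these helpers produce (hypothetical names; assuming the WHEN clause
parses into the three tokens ELAPSED_TIME, > and 100000), a trigger declared as

    CREATE TRIGGER daily_plan.slow_query WHEN ELAPSED_TIME > 100000 DO MOVE TO slow_pool;

yields "ELAPSED_TIME > 100000" from buildTriggerExpression (token texts joined by
single spaces, quotes preserved) and "MOVE TO slow_pool" from
buildTriggerActionExpression.
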
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerAnalyzer.java
new file mode 100644
index 0000000..80f8db4
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerAnalyzer.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.alter;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.TriggerUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for alter trigger commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_TRIGGER)
+public class AlterWMTriggerAnalyzer extends BaseSemanticAnalyzer {
+  public AlterWMTriggerAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() != 4) {
+      throw new SemanticException("Invalid syntax for alter trigger statement");
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String triggerName = unescapeIdentifier(root.getChild(1).getText());
+    String triggerExpression = TriggerUtils.buildTriggerExpression((ASTNode)root.getChild(2));
+    String actionExpression = TriggerUtils.buildTriggerActionExpression((ASTNode)root.getChild(3));
+
+    AlterWMTriggerDesc desc = new AlterWMTriggerDesc(resourcePlanName, triggerName, triggerExpression,
+        actionExpression);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
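
The statement shape this analyzer expects, with its four children (plan name, trigger
name, WHEN expression, DO action); names are hypothetical, assuming the documented Hive
syntax:

    ALTER TRIGGER daily_plan.slow_query WHEN ELAPSED_TIME > 200000 DO KILL;
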
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerDesc.java
new file mode 100644
index 0000000..a033a92
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerDesc.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.alter;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER TRIGGER commands.
+ */
+@Explain(displayName="Alter WM Trigger", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterWMTriggerDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String resourcePlanName;
+  private final String triggerName;
+  private final String triggerExpression;
+  private final String actionExpression;
+
+  public AlterWMTriggerDesc(String resourcePlanName, String triggerName, String triggerExpression,
+      String actionExpression) {
+    this.resourcePlanName = resourcePlanName;
+    this.triggerName = triggerName;
+    this.triggerExpression = triggerExpression;
+    this.actionExpression = actionExpression;
+  }
+
+  @Explain(displayName="Resource plan name", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName="Trigger name", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getTriggerName() {
+    return triggerName;
+  }
+
+  @Explain(displayName="Trigger expression", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getTriggerExpression() {
+    return triggerExpression;
+  }
+
+  @Explain(displayName="Action expression", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getActionExpression() {
+    return actionExpression;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerOperation.java
similarity index 72%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerOperation.java
index 34b2e33..44d2733 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterWMTriggerOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/AlterWMTriggerOperation.java
@@ -16,12 +16,14 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.alter;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.TriggerUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
@@ -34,8 +36,12 @@ public class AlterWMTriggerOperation extends DDLOperation<AlterWMTriggerDesc> {
 
   @Override
   public int execute() throws HiveException, IOException {
-    WMUtils.validateTrigger(desc.getTrigger());
-    context.getDb().alterWMTrigger(desc.getTrigger());
+    WMTrigger trigger = new WMTrigger(desc.getResourcePlanName(), desc.getTriggerName());
+    trigger.setTriggerExpression(desc.getTriggerExpression());
+    trigger.setActionExpression(desc.getActionExpression());
+
+    TriggerUtils.validateTrigger(trigger);
+    context.getDb().alterWMTrigger(trigger);
 
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/package-info.java
index 4860ee7..1f39b17 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/alter/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Alter Trigger DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.alter;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerAnalyzer.java
new file mode 100644
index 0000000..4eb0d14
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerAnalyzer.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.create;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.TriggerUtils;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for create trigger commands.
+ */
+@DDLType(type=HiveParser.TOK_CREATE_TRIGGER)
+public class CreateWMTriggerAnalyzer extends BaseSemanticAnalyzer {
+  public CreateWMTriggerAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() != 4) {
+      throw new SemanticException("Invalid syntax for create trigger statement");
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String triggerName = unescapeIdentifier(root.getChild(1).getText());
+    String triggerExpression = TriggerUtils.buildTriggerExpression((ASTNode)root.getChild(2));
+    String actionExpression = TriggerUtils.buildTriggerActionExpression((ASTNode)root.getChild(3));
+
+    CreateWMTriggerDesc desc = new CreateWMTriggerDesc(resourcePlanName, triggerName, triggerExpression,
+        actionExpression);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
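
The create form mirrors the alter form (hypothetical names; TOTAL_TASKS is assumed here
as one of the documented workload management counters):

    CREATE TRIGGER daily_plan.too_many_tasks WHEN TOTAL_TASKS > 10000 DO KILL;
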
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerDesc.java
new file mode 100644
index 0000000..6d218be
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerDesc.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.create;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for CREATE TRIGGER commands.
+ */
+@Explain(displayName="Create WM Trigger", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class CreateWMTriggerDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String resourcePlanName;
+  private final String triggerName;
+  private final String triggerExpression;
+  private final String actionExpression;
+
+  public CreateWMTriggerDesc(String resourcePlanName, String triggerName, String triggerExpression,
+      String actionExpression) {
+    this.resourcePlanName = resourcePlanName;
+    this.triggerName = triggerName;
+    this.triggerExpression = triggerExpression;
+    this.actionExpression = actionExpression;
+  }
+
+  @Explain(displayName="Resource plan name", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  @Explain(displayName="Trigger name", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getTriggerName() {
+    return triggerName;
+  }
+
+  @Explain(displayName="Trigger expression", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getTriggerExpression() {
+    return triggerExpression;
+  }
+
+  @Explain(displayName="Action expression", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getActionExpression() {
+    return actionExpression;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerOperation.java
similarity index 72%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerOperation.java
index 5fc0e08..b44988f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/CreateWMTriggerOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/CreateWMTriggerOperation.java
@@ -16,12 +16,14 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.create;
 
 import java.io.IOException;
 
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.TriggerUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
@@ -34,8 +36,12 @@ public class CreateWMTriggerOperation extends DDLOperation<CreateWMTriggerDesc>
 
   @Override
   public int execute() throws HiveException, IOException {
-    WMUtils.validateTrigger(desc.getTrigger());
-    context.getDb().createWMTrigger(desc.getTrigger());
+    WMTrigger trigger = new WMTrigger(desc.getResourcePlanName(), desc.getTriggerName());
+    trigger.setTriggerExpression(desc.getTriggerExpression());
+    trigger.setActionExpression(desc.getActionExpression());
+
+    TriggerUtils.validateTrigger(trigger);
+    context.getDb().createWMTrigger(trigger);
 
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/package-info.java
index 4860ee7..3f72467 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/create/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Create Trigger DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.create;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerAnalyzer.java
new file mode 100644
index 0000000..7f7a08e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerAnalyzer.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for drop trigger commands.
+ */
+@DDLType(type=HiveParser.TOK_DROP_TRIGGER)
+public class DropWMTriggerAnalyzer extends BaseSemanticAnalyzer {
+  public DropWMTriggerAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() != 2) {
+      throw new SemanticException("Invalid syntax for drop trigger.");
+    }
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+    String triggerName = unescapeIdentifier(root.getChild(1).getText());
+
+    DropWMTriggerDesc desc = new DropWMTriggerDesc(resourcePlanName, triggerName);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
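
The matching statement, with exactly the two identifier children checked above
(hypothetical names):

    DROP TRIGGER daily_plan.slow_query;
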
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerDesc.java
similarity index 73%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerDesc.java
index 7096706..7139f78 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.drop;
 
 import java.io.Serializable;
 
@@ -31,20 +31,20 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class DropWMTriggerDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 963803766313787632L;
 
-  private final String planName;
+  private final String resourcePlanName;
   private final String triggerName;
 
-  public DropWMTriggerDesc(String planName, String triggerName) {
-    this.planName = planName;
+  public DropWMTriggerDesc(String resourcePlanName, String triggerName) {
+    this.resourcePlanName = resourcePlanName;
     this.triggerName = triggerName;
   }
 
-  @Explain(displayName="resourcePlanName", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPlanName() {
-    return planName;
+  @Explain(displayName="Resource plan name", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
   }
 
-  @Explain(displayName="triggerName", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
+  @Explain(displayName="Trigger name", explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
   public String getTriggerName() {
     return triggerName;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerOperation.java
similarity index 89%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerOperation.java
index 7c94215..5796be7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/DropWMTriggerOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/DropWMTriggerOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.drop;
 
 import java.io.IOException;
 
@@ -34,7 +34,7 @@ public class DropWMTriggerOperation extends DDLOperation<DropWMTriggerDesc> {
 
   @Override
   public int execute() throws HiveException, IOException {
-    context.getDb().dropWMTrigger(desc.getPlanName(), desc.getTriggerName());
+    context.getDb().dropWMTrigger(desc.getResourcePlanName(), desc.getTriggerName());
 
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/package-info.java
index 4860ee7..a8fabfb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/drop/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Drop Trigger DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.drop;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/package-info.java
index 4860ee7..707cfca 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Trigger DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerAnalyzer.java
new file mode 100644
index 0000000..c3aa94c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerAnalyzer.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.pool.add;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.WMUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for alter pool add trigger commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_POOL_ADD_TRIGGER)
+public class AlterPoolAddTriggerAnalyzer extends BaseSemanticAnalyzer {
+  public AlterPoolAddTriggerAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() != 3) {
+      throw new SemanticException("Invalid syntax for alter pool add trigger: " + root.toStringTree());
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+
+    String poolPath = root.getChild(1).getType() == HiveParser.TOK_UNMANAGED ?
+        null : WMUtils.poolPath(root.getChild(1));
+    String triggerName = unescapeIdentifier(root.getChild(2).getText());
+
+    AlterPoolAddTriggerDesc desc = new AlterPoolAddTriggerDesc(resourcePlanName, triggerName, poolPath);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
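
A sketch of the accepted forms (hypothetical names, assuming the documented Hive
syntax); the UNMANAGED variant produces the TOK_UNMANAGED child and therefore a null
pool path in the desc:

    ALTER POOL daily_plan.etl ADD TRIGGER slow_query;
    ALTER TRIGGER daily_plan.slow_query ADD TO UNMANAGED;
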
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerDesc.java
similarity index 74%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerDesc.java
index 5aef9fa..772ffa5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerDesc.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.pool.add;
 
 import java.io.Serializable;
 
@@ -30,21 +30,19 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class AlterPoolAddTriggerDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 383046258694558029L;
 
-  private final String planName;
+  private final String resourcePlanName;
   private final String triggerName;
   private final String poolPath;
-  private final boolean isUnmanagedPool;
 
-  public AlterPoolAddTriggerDesc(String planName, String triggerName, String poolPath, boolean isUnmanagedPool) {
-    this.planName = planName;
+  public AlterPoolAddTriggerDesc(String resourcePlanName, String triggerName, String poolPath) {
+    this.resourcePlanName = resourcePlanName;
     this.triggerName = triggerName;
     this.poolPath = poolPath;
-    this.isUnmanagedPool = isUnmanagedPool;
   }
 
-  @Explain(displayName = "resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPlanName() {
-    return planName;
+  @Explain(displayName = "Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
   }
 
   @Explain(displayName = "Trigger name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -52,16 +50,12 @@ public class AlterPoolAddTriggerDesc implements DDLDesc, Serializable {
     return triggerName;
   }
 
-  @Explain(displayName = "Pool path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPoolPathForExplain() {
-    return isUnmanagedPool ? "<unmanaged queries>" : poolPath;
-  }
-
   public String getPoolPath() {
     return poolPath;
   }
 
-  public boolean isUnmanagedPool() {
-    return isUnmanagedPool;
+  @Explain(displayName = "Pool path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getPoolPathForExplain() {
+    return poolPath == null ? "<unmanaged queries>" : poolPath;
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerOperation.java
similarity index 81%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerOperation.java
index 7893455..8ac0277 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolAddTriggerOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/AlterPoolAddTriggerOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.pool.add;
 
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 
@@ -36,12 +36,11 @@ public class AlterPoolAddTriggerOperation extends DDLOperation<AlterPoolAddTrigg
 
   @Override
   public int execute() throws HiveException, IOException {
-    if (!desc.isUnmanagedPool()) {
-      context.getDb().createOrDropTriggerToPoolMapping(desc.getPlanName(), desc.getTriggerName(), desc.getPoolPath(),
-          false);
+    if (desc.getPoolPath() != null) {
+      context.getDb().createOrDropTriggerToPoolMapping(desc.getResourcePlanName(), desc.getTriggerName(),
+          desc.getPoolPath(), false);
     } else {
-      assert desc.getPoolPath() == null;
-      WMTrigger trigger = new WMTrigger(desc.getPlanName(), desc.getTriggerName());
+      WMTrigger trigger = new WMTrigger(desc.getResourcePlanName(), desc.getTriggerName());
       // If we are dropping from unmanaged, unset the flag; and vice versa
       trigger.setIsInUnmanaged(true);
       context.getDb().alterWMTrigger(trigger);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/package-info.java
index 4860ee7..66268fe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/add/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Add Trigger to Pool DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.pool.add;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerAnalyzer.java
new file mode 100644
index 0000000..cce878f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerAnalyzer.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.pool.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.ddl.workloadmanagement.WMUtils;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for alter pool drop trigger commands.
+ */
+@DDLType(type=HiveParser.TOK_ALTER_POOL_DROP_TRIGGER)
+public class AlterPoolDropTriggerAnalyzer extends BaseSemanticAnalyzer {
+  public AlterPoolDropTriggerAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() != 3) {
+      throw new SemanticException("Invalid syntax for alter pool add trigger: " + root.toStringTree());
+    }
+
+    String resourcePlanName = unescapeIdentifier(root.getChild(0).getText());
+
+    String poolPath = root.getChild(1).getType() == HiveParser.TOK_UNMANAGED ?
+        null : WMUtils.poolPath(root.getChild(1));
+    String triggerName = unescapeIdentifier(root.getChild(2).getText());
+
+    AlterPoolDropTriggerDesc desc = new AlterPoolDropTriggerDesc(resourcePlanName, triggerName, poolPath);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
+
+    DDLUtils.addServiceOutput(conf, getOutputs());
+  }
+}
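
The new analyzer registers itself through the @DDLType annotation instead of a switch case in DDLSemanticAnalyzer (those cases are deleted further down in this patch). A rough, self-contained sketch of that registration pattern — illustrative only; the real lookup lives in DDLSemanticAnalyzerFactory and differs in detail:

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.util.HashMap;
    import java.util.Map;

    final class DDLTypeRegistrySketch {
      // Stand-in for the real DDLSemanticAnalyzerFactory.DDLType annotation.
      @Retention(RetentionPolicy.RUNTIME)
      @interface DDLType {
        int type();
      }

      private final Map<Integer, Class<?>> analyzersByToken = new HashMap<>();

      // Record an analyzer class under the parser token type it declares.
      void register(Class<?> analyzerClass) {
        DDLType ddlType = analyzerClass.getAnnotation(DDLType.class);
        if (ddlType != null) {
          analyzersByToken.put(ddlType.type(), analyzerClass);
        }
      }

      // The factory can then pick an analyzer by the root token of the AST.
      Class<?> analyzerFor(int rootTokenType) {
        return analyzersByToken.get(rootTokenType);
      }
    }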
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerDesc.java
similarity index 74%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerDesc.java
index e6942fc..ad40146 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerDesc.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.pool.drop;
 
 import java.io.Serializable;
 
@@ -31,21 +31,19 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class AlterPoolDropTriggerDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 383046258694558029L;
 
-  private final String planName;
+  private final String resourcePlanName;
   private final String triggerName;
   private final String poolPath;
-  private final boolean isUnmanagedPool;
 
-  public AlterPoolDropTriggerDesc(String planName, String triggerName, String poolPath, boolean isUnmanagedPool) {
-    this.planName = planName;
+  public AlterPoolDropTriggerDesc(String resourcePlanName, String triggerName, String poolPath) {
+    this.resourcePlanName = resourcePlanName;
     this.triggerName = triggerName;
     this.poolPath = poolPath;
-    this.isUnmanagedPool = isUnmanagedPool;
   }
 
-  @Explain(displayName = "resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPlanName() {
-    return planName;
+  @Explain(displayName = "Resource plan name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
   }
 
   @Explain(displayName = "Trigger name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -53,16 +51,12 @@ public class AlterPoolDropTriggerDesc implements DDLDesc, Serializable {
     return triggerName;
   }
 
-  @Explain(displayName = "Pool path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getPoolPathForExplain() {
-    return isUnmanagedPool ? "<unmanaged queries>" : poolPath;
-  }
-
   public String getPoolPath() {
     return poolPath;
   }
 
-  public boolean isUnmanagedPool() {
-    return isUnmanagedPool;
+  @Explain(displayName = "Pool path", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getPoolPathForExplain() {
+    return poolPath == null ? "<unmanaged queries>" : poolPath;
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerOperation.java
similarity index 82%
rename from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerOperation.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerOperation.java
index 4a8b404..d3d3539 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/AlterPoolDropTriggerOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/AlterPoolDropTriggerOperation.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.pool.drop;
 
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 
@@ -36,12 +36,11 @@ public class AlterPoolDropTriggerOperation extends DDLOperation<AlterPoolDropTri
 
   @Override
   public int execute() throws HiveException, IOException {
-    if (!desc.isUnmanagedPool()) {
-      context.getDb().createOrDropTriggerToPoolMapping(desc.getPlanName(), desc.getTriggerName(), desc.getPoolPath(),
-          true);
+    if (desc.getPoolPath() != null) {
+      context.getDb().createOrDropTriggerToPoolMapping(desc.getResourcePlanName(), desc.getTriggerName(),
+          desc.getPoolPath(), true);
     } else {
-      assert desc.getPoolPath() == null;
-      WMTrigger trigger = new WMTrigger(desc.getPlanName(), desc.getTriggerName());
+      WMTrigger trigger = new WMTrigger(desc.getResourcePlanName(), desc.getTriggerName());
       // If we are dropping from unmanaged, unset the flag; and vice versa
       trigger.setIsInUnmanaged(false);
       context.getDb().alterWMTrigger(trigger);
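
After the split, the add and drop operations are mirror images: both branch on whether a pool path is present, and only the drop flag and the unmanaged-flag value differ. A condensed sketch of that shared shape, reusing the same metastore client calls the patch makes (the Hive handle wiring is assumed):

    import org.apache.hadoop.hive.metastore.api.WMTrigger;
    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    final class TriggerPoolChangeSketch {
      // drop == false covers ADD TRIGGER, drop == true covers DROP TRIGGER.
      static void apply(Hive db, String rpName, String triggerName,
          String poolPath, boolean drop) throws HiveException {
        if (poolPath != null) {
          // Managed pool: create or remove the trigger-to-pool mapping.
          db.createOrDropTriggerToPoolMapping(rpName, triggerName, poolPath, drop);
        } else {
          // Unmanaged queries: flip the trigger's unmanaged flag instead.
          WMTrigger trigger = new WMTrigger(rpName, triggerName);
          trigger.setIsInUnmanaged(!drop);
          db.alterWMTrigger(trigger);
        }
      }
    }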
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/package-info.java
similarity index 55%
copy from ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/package-info.java
index 4860ee7..6a2b6b4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/WMUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/workloadmanagement/trigger/pool/drop/package-info.java
@@ -16,25 +16,5 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.ddl.workloadmanagement;
-
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.wm.ExecutionTrigger;
-
-/**
- * Common utilities for Workload Management related ddl operations.
- */
-final class WMUtils {
-  private WMUtils() {
-    throw new UnsupportedOperationException("WMUtils should not be instantiated");
-  }
-
-  static void validateTrigger(WMTrigger trigger) throws HiveException {
-    try {
-      ExecutionTrigger.fromWMTrigger(trigger);
-    } catch (IllegalArgumentException e) {
-      throw new HiveException(e);
-    }
-  }
-}
+/** Drop Trigger from Pool DDL operation. */
+package org.apache.hadoop.hive.ql.ddl.workloadmanagement.trigger.pool.drop;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 023369f..1865d77 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -54,12 +54,6 @@ import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
-import org.apache.hadoop.hive.metastore.api.WMMapping;
-import org.apache.hadoop.hive.metastore.api.WMNullablePool;
-import org.apache.hadoop.hive.metastore.api.WMNullableResourcePlan;
-import org.apache.hadoop.hive.metastore.api.WMPool;
-import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
-import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.Driver;
@@ -121,28 +115,12 @@ import org.apache.hadoop.hive.ql.ddl.table.storage.AlterTableUnarchiveDesc;
 import org.apache.hadoop.hive.ql.ddl.view.AlterMaterializedViewRewriteDesc;
 import org.apache.hadoop.hive.ql.ddl.view.DropMaterializedViewDesc;
 import org.apache.hadoop.hive.ql.ddl.view.DropViewDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterPoolAddTriggerDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterPoolDropTriggerDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterResourcePlanDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterWMMappingDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterWMPoolDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.AlterWMTriggerDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.CreateResourcePlanDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.CreateWMMappingDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.CreateWMPoolDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.CreateWMTriggerDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.DropResourcePlanDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.DropWMMappingDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.DropWMPoolDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.DropWMTriggerDesc;
-import org.apache.hadoop.hive.ql.ddl.workloadmanagement.ShowResourcePlanDesc;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
@@ -474,46 +452,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
    case HiveParser.TOK_CACHE_METADATA:
      analyzeCacheMetadata(ast);
      break;
-   case HiveParser.TOK_CREATE_RP:
-     analyzeCreateResourcePlan(ast);
-     break;
-   case HiveParser.TOK_SHOW_RP:
-     ctx.setResFile(ctx.getLocalTmpPath());
-     analyzeShowResourcePlan(ast);
-     break;
-   case HiveParser.TOK_ALTER_RP:
-     analyzeAlterResourcePlan(ast);
-     break;
-   case HiveParser.TOK_DROP_RP:
-     analyzeDropResourcePlan(ast);
-     break;
-   case HiveParser.TOK_CREATE_TRIGGER:
-     analyzeCreateTrigger(ast);
-     break;
-   case HiveParser.TOK_ALTER_TRIGGER:
-     analyzeAlterTrigger(ast);
-     break;
-   case HiveParser.TOK_DROP_TRIGGER:
-     analyzeDropTrigger(ast);
-     break;
-   case HiveParser.TOK_CREATE_POOL:
-     analyzeCreatePool(ast);
-     break;
-   case HiveParser.TOK_ALTER_POOL:
-     analyzeAlterPool(ast);
-     break;
-   case HiveParser.TOK_DROP_POOL:
-     analyzeDropPool(ast);
-     break;
-   case HiveParser.TOK_CREATE_MAPPING:
-     analyzeCreateOrAlterMapping(ast, false);
-     break;
-   case HiveParser.TOK_ALTER_MAPPING:
-     analyzeCreateOrAlterMapping(ast, true);
-     break;
-   case HiveParser.TOK_DROP_MAPPING:
-     analyzeDropMapping(ast);
-     break;
    default:
       throw new SemanticException("Unsupported command: " + ast);
     }
@@ -656,450 +594,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     return counter;
   }
 
-  private void analyzeCreateResourcePlan(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() == 0) {
-      throw new SemanticException("Expected name in CREATE RESOURCE PLAN statement");
-    }
-    String resourcePlanName = unescapeIdentifier(ast.getChild(0).getText());
-    Integer queryParallelism = null;
-    String likeName = null;
-    boolean ifNotExists = false;
-    for (int i = 1; i < ast.getChildCount(); ++i) {
-      Tree child = ast.getChild(i);
-      switch (child.getType()) {
-      case HiveParser.TOK_QUERY_PARALLELISM:
-        // Note: later we may be able to set multiple things together (except LIKE).
-        if (queryParallelism == null && likeName == null) {
-          queryParallelism = Integer.parseInt(child.getChild(0).getText());
-        } else {
-          throw new SemanticException("Conflicting create arguments " + ast.toStringTree());
-        }
-        break;
-      case HiveParser.TOK_LIKERP:
-        if (queryParallelism == null && likeName == null) {
-          likeName = unescapeIdentifier(child.getChild(0).getText());
-        } else {
-          throw new SemanticException("Conflicting create arguments " + ast.toStringTree());
-        }
-        break;
-      case HiveParser.TOK_IFNOTEXISTS:
-        ifNotExists = true;
-        break;
-      default: throw new SemanticException("Invalid create arguments " + ast.toStringTree());
-      }
-    }
-    CreateResourcePlanDesc desc = new CreateResourcePlanDesc(resourcePlanName, queryParallelism, likeName, ifNotExists);
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private void analyzeShowResourcePlan(ASTNode ast) throws SemanticException {
-    String rpName = null;
-    if (ast.getChildCount() > 0) {
-      rpName = unescapeIdentifier(ast.getChild(0).getText());
-    }
-    if (ast.getChildCount() > 1) {
-      throw new SemanticException("Invalid syntax for SHOW RESOURCE PLAN statement");
-    }
-    ShowResourcePlanDesc showResourcePlanDesc = new ShowResourcePlanDesc(rpName, ctx.getResFile().toString());
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showResourcePlanDesc)));
-    setFetchTask(createFetchTask(showResourcePlanDesc.getSchema()));
-  }
-
-  private void analyzeAlterResourcePlan(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() < 1) {
-      throw new SemanticException("Incorrect syntax");
-    }
-    Tree nameOrGlobal = ast.getChild(0);
-    switch (nameOrGlobal.getType()) {
-    case HiveParser.TOK_ENABLE:
-      // This command exists solely to output this message. TODO: can we do it w/o an error?
-      throw new SemanticException("Activate a resource plan to enable workload management");
-    case HiveParser.TOK_DISABLE:
-      WMNullableResourcePlan anyRp = new WMNullableResourcePlan();
-      anyRp.setStatus(WMResourcePlanStatus.ENABLED);
-      AlterResourcePlanDesc desc = new AlterResourcePlanDesc(anyRp, null, false, false, true, false, null);
-      addServiceOutput();
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-      return;
-    default: // Continue to handle changes to a specific plan.
-    }
-    if (ast.getChildCount() < 2) {
-      throw new SemanticException("Invalid syntax for ALTER RESOURCE PLAN statement");
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    WMNullableResourcePlan resourcePlan = new WMNullableResourcePlan();
-    boolean isEnableActivate = false, isReplace = false;
-    boolean validate = false;
-    for (int i = 1; i < ast.getChildCount(); ++i) {
-      Tree child = ast.getChild(i);
-      switch (child.getType()) {
-      case HiveParser.TOK_VALIDATE:
-        validate = true;
-        break;
-      case HiveParser.TOK_ACTIVATE:
-        if (resourcePlan.getStatus() == WMResourcePlanStatus.ENABLED) {
-          isEnableActivate = true;
-        }
-        if (child.getChildCount() > 1) {
-          throw new SemanticException("Expected 0 or 1 arguments " + ast.toStringTree());
-        } else if (child.getChildCount() == 1) {
-          if (child.getChild(0).getType() != HiveParser.TOK_REPLACE) {
-            throw new SemanticException("Incorrect syntax " + ast.toStringTree());
-          }
-          isReplace = true;
-          isEnableActivate = false; // Implied.
-        }
-        resourcePlan.setStatus(WMResourcePlanStatus.ACTIVE);
-        break;
-      case HiveParser.TOK_ENABLE:
-        if (resourcePlan.getStatus() == WMResourcePlanStatus.ACTIVE) {
-          isEnableActivate = !isReplace;
-        } else {
-          resourcePlan.setStatus(WMResourcePlanStatus.ENABLED);
-        }
-        break;
-      case HiveParser.TOK_DISABLE:
-        resourcePlan.setStatus(WMResourcePlanStatus.DISABLED);
-        break;
-      case HiveParser.TOK_REPLACE:
-        isReplace = true;
-        if (child.getChildCount() > 1) {
-          throw new SemanticException("Expected 0 or 1 arguments " + ast.toStringTree());
-        } else if (child.getChildCount() == 1) {
-          // Replace is essentially renaming a plan to the name of an existing plan, with backup.
-          resourcePlan.setName(unescapeIdentifier(child.getChild(0).getText()));
-        } else {
-          resourcePlan.setStatus(WMResourcePlanStatus.ACTIVE);
-        }
-        break;
-      case HiveParser.TOK_QUERY_PARALLELISM: {
-        if (child.getChildCount() != 1) {
-          throw new SemanticException("Expected one argument");
-        }
-        Tree val = child.getChild(0);
-        resourcePlan.setIsSetQueryParallelism(true);
-        if (val.getType() == HiveParser.TOK_NULL) {
-          resourcePlan.unsetQueryParallelism();
-        } else {
-          resourcePlan.setQueryParallelism(Integer.parseInt(val.getText()));
-        }
-        break;
-      }
-      case HiveParser.TOK_DEFAULT_POOL: {
-        if (child.getChildCount() != 1) {
-          throw new SemanticException("Expected one argument");
-        }
-        Tree val = child.getChild(0);
-        resourcePlan.setIsSetDefaultPoolPath(true);
-        if (val.getType() == HiveParser.TOK_NULL) {
-          resourcePlan.unsetDefaultPoolPath();
-        } else {
-          resourcePlan.setDefaultPoolPath(poolPath(child.getChild(0)));
-        }
-        break;
-      }
-      case HiveParser.TOK_RENAME:
-        if (child.getChildCount() != 1) {
-          throw new SemanticException("Expected one argument");
-        }
-        resourcePlan.setName(unescapeIdentifier(child.getChild(0).getText()));
-        break;
-      default:
-        throw new SemanticException(
-          "Unexpected token in alter resource plan statement: " + child.getType());
-      }
-    }
-    String resFile = null;
-    if (validate) {
-      ctx.setResFile(ctx.getLocalTmpPath());
-      resFile = ctx.getResFile().toString();
-    }
-    AlterResourcePlanDesc desc = new AlterResourcePlanDesc(resourcePlan, rpName, validate, isEnableActivate, false,
-        isReplace, resFile);
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-    if (validate) {
-      setFetchTask(createFetchTask(AlterResourcePlanDesc.SCHEMA));
-    }
-  }
-
-  private void analyzeDropResourcePlan(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() == 0) {
-      throw new SemanticException("Expected name in DROP RESOURCE PLAN statement");
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    boolean ifExists = false;
-    for (int i = 1; i < ast.getChildCount(); ++i) {
-      Tree child = ast.getChild(i);
-      switch (child.getType()) {
-      case HiveParser.TOK_IFEXISTS:
-        ifExists = true;
-        break;
-      default: throw new SemanticException("Invalid create arguments " + ast.toStringTree());
-      }
-    }
-    DropResourcePlanDesc desc = new DropResourcePlanDesc(rpName, ifExists);
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private void analyzeCreateTrigger(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() != 4) {
-      throw new SemanticException("Invalid syntax for create trigger statement");
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    String triggerName = unescapeIdentifier(ast.getChild(1).getText());
-    String triggerExpression = buildTriggerExpression((ASTNode)ast.getChild(2));
-    String actionExpression = buildTriggerActionExpression((ASTNode)ast.getChild(3));
-
-    WMTrigger trigger = new WMTrigger(rpName, triggerName);
-    trigger.setTriggerExpression(triggerExpression);
-    trigger.setActionExpression(actionExpression);
-
-    CreateWMTriggerDesc desc = new CreateWMTriggerDesc(trigger);
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private String buildTriggerExpression(ASTNode ast) throws SemanticException {
-    if (ast.getType() != HiveParser.TOK_TRIGGER_EXPRESSION || ast.getChildCount() == 0) {
-      throw new SemanticException("Invalid trigger expression.");
-    }
-    StringBuilder builder = new StringBuilder();
-    for (int i = 0; i < ast.getChildCount(); ++i) {
-      builder.append(ast.getChild(i).getText()); // Don't strip quotes.
-      builder.append(' ');
-    }
-    builder.deleteCharAt(builder.length() - 1);
-    return builder.toString();
-  }
-
-  private String poolPath(Tree ast) {
-    StringBuilder builder = new StringBuilder();
-    builder.append(unescapeIdentifier(ast.getText()));
-    for (int i = 0; i < ast.getChildCount(); ++i) {
-      // DOT is not affected
-      builder.append(unescapeIdentifier(ast.getChild(i).getText()));
-    }
-    return builder.toString();
-  }
-
-  private String buildTriggerActionExpression(ASTNode ast) throws SemanticException {
-    switch (ast.getType()) {
-    case HiveParser.KW_KILL:
-      return "KILL";
-    case HiveParser.KW_MOVE:
-      if (ast.getChildCount() != 1) {
-        throw new SemanticException("Invalid move to clause in trigger action.");
-      }
-      String poolPath = poolPath(ast.getChild(0));
-      return "MOVE TO " + poolPath;
-    default:
-      throw new SemanticException("Unknown token in action clause: " + ast.getType());
-    }
-  }
-
-  private void analyzeAlterTrigger(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() != 4) {
-      throw new SemanticException("Invalid syntax for alter trigger statement");
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    String triggerName = unescapeIdentifier(ast.getChild(1).getText());
-    String triggerExpression = buildTriggerExpression((ASTNode)ast.getChild(2));
-    String actionExpression = buildTriggerActionExpression((ASTNode)ast.getChild(3));
-
-    WMTrigger trigger = new WMTrigger(rpName, triggerName);
-    trigger.setTriggerExpression(triggerExpression);
-    trigger.setActionExpression(actionExpression);
-
-    AlterWMTriggerDesc desc = new AlterWMTriggerDesc(trigger);
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private void analyzeDropTrigger(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() != 2) {
-      throw new SemanticException("Invalid syntax for drop trigger.");
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    String triggerName = unescapeIdentifier(ast.getChild(1).getText());
-
-    DropWMTriggerDesc desc = new DropWMTriggerDesc(rpName, triggerName);
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private void analyzeCreatePool(ASTNode ast) throws SemanticException {
-    // TODO: allow defaults for e.g. scheduling policy.
-    if (ast.getChildCount() < 3) {
-      throw new SemanticException("Expected more arguments: " + ast.toStringTree());
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    String poolPath = poolPath(ast.getChild(1));
-    WMPool pool = new WMPool(rpName, poolPath);
-    for (int i = 2; i < ast.getChildCount(); ++i) {
-      Tree child = ast.getChild(i);
-      if (child.getChildCount() != 1) {
-        throw new SemanticException("Expected 1 paramter for: " + child.getText());
-      }
-      String param = child.getChild(0).getText();
-      switch (child.getType()) {
-      case HiveParser.TOK_ALLOC_FRACTION:
-        pool.setAllocFraction(Double.parseDouble(param));
-        break;
-      case HiveParser.TOK_QUERY_PARALLELISM:
-        pool.setQueryParallelism(Integer.parseInt(param));
-        break;
-      case HiveParser.TOK_SCHEDULING_POLICY:
-        String schedulingPolicyStr = PlanUtils.stripQuotes(param);
-        if (!MetaStoreUtils.isValidSchedulingPolicy(schedulingPolicyStr)) {
-          throw new SemanticException("Invalid scheduling policy " + schedulingPolicyStr);
-        }
-        pool.setSchedulingPolicy(schedulingPolicyStr);
-        break;
-      case HiveParser.TOK_PATH:
-        throw new SemanticException("Invalid parameter path in create pool");
-      }
-    }
-    if (!pool.isSetAllocFraction()) {
-      throw new SemanticException("alloc_fraction should be specified for a pool");
-    }
-    if (!pool.isSetQueryParallelism()) {
-      throw new SemanticException("query_parallelism should be specified for a pool");
-    }
-    CreateWMPoolDesc desc = new CreateWMPoolDesc(pool);
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private void analyzeAlterPool(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() < 3) {
-      throw new SemanticException("Invalid syntax for alter pool: " + ast.toStringTree());
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    Tree poolTarget = ast.getChild(1);
-
-    boolean isUnmanagedPool = false;
-    String poolPath = null;
-    if (poolTarget.getType() == HiveParser.TOK_UNMANAGED) {
-      isUnmanagedPool = true;
-    } else {
-      poolPath = poolPath(ast.getChild(1));
-    }
-
-    WMNullablePool poolChanges = null;
-    boolean hasTrigger = false;
-    for (int i = 2; i < ast.getChildCount(); ++i) {
-      Tree child = ast.getChild(i);
-      if (child.getChildCount() != 1) {
-        throw new SemanticException("Invalid syntax in alter pool expected parameter.");
-      }
-      Tree param = child.getChild(0);
-      if (child.getType() == HiveParser.TOK_ADD_TRIGGER
-          || child.getType() == HiveParser.TOK_DROP_TRIGGER) {
-        hasTrigger = true;
-        boolean drop = child.getType() == HiveParser.TOK_DROP_TRIGGER;
-        String triggerName = unescapeIdentifier(param.getText());
-        if (drop) {
-          rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-              new AlterPoolDropTriggerDesc(rpName, triggerName, poolPath, isUnmanagedPool))));
-        } else {
-          rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-              new AlterPoolAddTriggerDesc(rpName, triggerName, poolPath, isUnmanagedPool))));
-        }
-      } else {
-        if (isUnmanagedPool) {
-          throw new SemanticException("Cannot alter the unmanaged pool");
-        }
-        if (poolChanges == null) {
-          poolChanges = new WMNullablePool(rpName, null);
-        }
-        switch (child.getType()) {
-        case HiveParser.TOK_ALLOC_FRACTION:
-          poolChanges.setAllocFraction(Double.parseDouble(param.getText()));
-          break;
-        case HiveParser.TOK_QUERY_PARALLELISM:
-          poolChanges.setQueryParallelism(Integer.parseInt(param.getText()));
-          break;
-        case HiveParser.TOK_SCHEDULING_POLICY:
-          poolChanges.setIsSetSchedulingPolicy(true);
-          if (param.getType() != HiveParser.TOK_NULL) {
-            poolChanges.setSchedulingPolicy(PlanUtils.stripQuotes(param.getText()));
-          }
-          break;
-        case HiveParser.TOK_PATH:
-          poolChanges.setPoolPath(poolPath(param));
-          break;
-        default: throw new SemanticException("Incorrect alter syntax: " + child.toStringTree());
-        }
-      }
-    }
-
-    if (poolChanges != null || hasTrigger) {
-      addServiceOutput();
-    }
-    if (poolChanges != null) {
-      if (!poolChanges.isSetPoolPath()) {
-        poolChanges.setPoolPath(poolPath);
-      }
-      AlterWMPoolDesc ddlDesc = new AlterWMPoolDesc(poolChanges, poolPath);
-      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), ddlDesc)));
-    }
-  }
-
-  private void analyzeDropPool(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() != 2) {
-      throw new SemanticException("Invalid syntax for drop pool.");
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    String poolPath = poolPath(ast.getChild(1));
-
-    DropWMPoolDesc desc = new DropWMPoolDesc(rpName, poolPath);
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private void analyzeCreateOrAlterMapping(ASTNode ast, boolean update) throws SemanticException {
-    if (ast.getChildCount() < 4) {
-      throw new SemanticException("Invalid syntax for create or alter mapping.");
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    String entityType = ast.getChild(1).getText();
-    String entityName = PlanUtils.stripQuotes(ast.getChild(2).getText());
-    WMMapping mapping = new WMMapping(rpName, entityType, entityName);
-    Tree dest = ast.getChild(3);
-    if (dest.getType() != HiveParser.TOK_UNMANAGED) {
-      mapping.setPoolPath(poolPath(dest));
-    } // Null path => unmanaged
-    if (ast.getChildCount() == 5) {
-      mapping.setOrdering(Integer.valueOf(ast.getChild(4).getText()));
-    }
-
-    org.apache.hadoop.hive.ql.ddl.DDLDesc desc = null;
-    if (update) {
-      desc = new AlterWMMappingDesc(mapping);
-    } else {
-      desc = new CreateWMMappingDesc(mapping);
-    }
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
-  private void analyzeDropMapping(ASTNode ast) throws SemanticException {
-    if (ast.getChildCount() != 3) {
-      throw new SemanticException("Invalid syntax for drop mapping.");
-    }
-    String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    String entityType = ast.getChild(1).getText();
-    String entityName = PlanUtils.stripQuotes(ast.getChild(2).getText());
-
-    DropWMMappingDesc desc = new DropWMMappingDesc(new WMMapping(rpName, entityType, entityName));
-    addServiceOutput();
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
-  }
-
   private void analyzeDropTable(ASTNode ast) throws SemanticException {
     String tableName = getUnescapedName((ASTNode) ast.getChild(0));
     boolean ifExists = (ast.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null);
@@ -2566,13 +2060,6 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     ctx.setNeedLockMgr(true);
   }
 
-  private void addServiceOutput() throws SemanticException {
-    String hs2Hostname = getHS2Host();
-    if (hs2Hostname != null) {
-      outputs.add(new WriteEntity(hs2Hostname, Type.SERVICE_NAME));
-    }
-  }
-
   private String getHS2Host() throws SemanticException {
     if (SessionState.get().isHiveServerQuery()) {
       return SessionState.get().getHiveServer2Host();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 6e7ed4d..d56c8c6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -417,9 +417,14 @@ TOK_BLOCKING;
 TOK_KILL_QUERY;
 TOK_CREATE_RP;
 TOK_SHOW_RP;
-TOK_ALTER_RP;
+TOK_ALTER_RP_ENABLE;
+TOK_ALTER_RP_DISABLE;
+TOK_ALTER_RP_RENAME;
+TOK_ALTER_RP_SET;
+TOK_ALTER_RP_UNSET;
+TOK_ALTER_RP_REPLACE;
+TOK_ALTER_RP_VALIDATE;
 TOK_DROP_RP;
-TOK_VALIDATE;
 TOK_ACTIVATE;
 TOK_QUERY_PARALLELISM;
 TOK_RENAME;
@@ -430,6 +435,8 @@ TOK_DROP_TRIGGER;
 TOK_TRIGGER_EXPRESSION;
 TOK_CREATE_POOL;
 TOK_ALTER_POOL;
+TOK_ALTER_POOL_ADD_TRIGGER;
+TOK_ALTER_POOL_DROP_TRIGGER;
 TOK_DROP_POOL;
 TOK_ALLOC_FRACTION;
 TOK_SCHEDULING_POLICY;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
index 0479c78..0460c37 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
@@ -76,11 +76,11 @@ rpAssignList
   ;
 
 rpUnassign
-@init { gParent.pushMsg("rpAssign", state); }
+@init { gParent.pushMsg("rpUnassign", state); }
 @after { gParent.popMsg(state); }
   : (
-      (KW_QUERY_PARALLELISM) -> ^(TOK_QUERY_PARALLELISM TOK_NULL)
-    | (KW_DEFAULT KW_POOL) -> ^(TOK_DEFAULT_POOL TOK_NULL)
+      (KW_QUERY_PARALLELISM) -> ^(TOK_QUERY_PARALLELISM)
+    | (KW_DEFAULT KW_POOL) -> ^(TOK_DEFAULT_POOL)
     )
   ;
 
@@ -111,12 +111,12 @@ alterResourcePlanStatement
 @init { gParent.pushMsg("alter resource plan statement", state); }
 @after { gParent.popMsg(state); }
     : KW_ALTER KW_RESOURCE KW_PLAN name=identifier (
-          (KW_VALIDATE -> ^(TOK_ALTER_RP $name TOK_VALIDATE))
-        | (KW_DISABLE -> ^(TOK_ALTER_RP $name TOK_DISABLE))
-        | (KW_SET rpAssignList -> ^(TOK_ALTER_RP $name rpAssignList))
-        | (KW_UNSET rpUnassignList -> ^(TOK_ALTER_RP $name rpUnassignList))
-        | (KW_RENAME KW_TO newName=identifier -> ^(TOK_ALTER_RP $name ^(TOK_RENAME $newName)))
-        | ((activate enable? | enable activate?) -> ^(TOK_ALTER_RP $name activate? enable?))
+          (KW_VALIDATE -> ^(TOK_ALTER_RP_VALIDATE $name))
+        | (KW_DISABLE -> ^(TOK_ALTER_RP_DISABLE $name))
+        | (KW_SET rpAssignList -> ^(TOK_ALTER_RP_SET $name rpAssignList))
+        | (KW_UNSET rpUnassignList -> ^(TOK_ALTER_RP_UNSET $name rpUnassignList))
+        | (KW_RENAME KW_TO newName=identifier -> ^(TOK_ALTER_RP_RENAME $name $newName))
+        | ((activate enable? | enable activate?) -> ^(TOK_ALTER_RP_ENABLE $name activate? enable?))
       )
     ;
 
@@ -125,15 +125,16 @@ alterResourcePlanStatement
 globalWmStatement
 @init { gParent.pushMsg("global WM statement", state); }
 @after { gParent.popMsg(state); }
-    : (enable | disable) KW_WORKLOAD KW_MANAGEMENT -> ^(TOK_ALTER_RP enable? disable?)
+    : KW_ENABLE KW_WORKLOAD KW_MANAGEMENT -> ^(TOK_ALTER_RP_ENABLE)
+    | KW_DISABLE KW_WORKLOAD KW_MANAGEMENT -> ^(TOK_ALTER_RP_DISABLE)
     ;
 
 replaceResourcePlanStatement
 @init { gParent.pushMsg("replace resource plan statement", state); }
 @after { gParent.popMsg(state); }
     : KW_REPLACE (
-          (KW_ACTIVE KW_RESOURCE KW_PLAN KW_WITH src=identifier -> ^(TOK_ALTER_RP $src TOK_REPLACE))
-        | (KW_RESOURCE KW_PLAN dest=identifier KW_WITH src=identifier -> ^(TOK_ALTER_RP $src ^(TOK_REPLACE $dest)))
+          (KW_ACTIVE KW_RESOURCE KW_PLAN KW_WITH src=identifier -> ^(TOK_ALTER_RP_REPLACE $src))
+        | (KW_RESOURCE KW_PLAN dest=identifier KW_WITH src=identifier -> ^(TOK_ALTER_RP_REPLACE $src $dest))
       )
     ;
 
@@ -216,10 +217,10 @@ alterTriggerStatement
     : KW_ALTER KW_TRIGGER rpName=identifier DOT triggerName=identifier (
         (KW_WHEN triggerExpression KW_DO triggerActionExpression
           -> ^(TOK_ALTER_TRIGGER $rpName $triggerName triggerExpression triggerActionExpression))
-      | (KW_ADD KW_TO KW_POOL poolName=poolPath -> ^(TOK_ALTER_POOL $rpName $poolName ^(TOK_ADD_TRIGGER $triggerName)))
-      | (KW_DROP KW_FROM KW_POOL poolName=poolPath -> ^(TOK_ALTER_POOL $rpName $poolName ^(TOK_DROP_TRIGGER $triggerName)))
-      | (KW_ADD KW_TO KW_UNMANAGED -> ^(TOK_ALTER_POOL $rpName TOK_UNMANAGED ^(TOK_ADD_TRIGGER $triggerName)))
-      | (KW_DROP KW_FROM KW_UNMANAGED -> ^(TOK_ALTER_POOL $rpName TOK_UNMANAGED ^(TOK_DROP_TRIGGER $triggerName)))
+      | (KW_ADD KW_TO KW_POOL poolName=poolPath -> ^(TOK_ALTER_POOL_ADD_TRIGGER $rpName $poolName $triggerName))
+      | (KW_DROP KW_FROM KW_POOL poolName=poolPath -> ^(TOK_ALTER_POOL_DROP_TRIGGER $rpName $poolName $triggerName))
+      | (KW_ADD KW_TO KW_UNMANAGED -> ^(TOK_ALTER_POOL_ADD_TRIGGER $rpName TOK_UNMANAGED $triggerName))
+      | (KW_DROP KW_FROM KW_UNMANAGED -> ^(TOK_ALTER_POOL_DROP_TRIGGER $rpName TOK_UNMANAGED $triggerName))
     )
     ;
 
@@ -262,10 +263,8 @@ alterPoolStatement
     : KW_ALTER KW_POOL rpName=identifier DOT poolPath (
         (KW_SET poolAssignList -> ^(TOK_ALTER_POOL $rpName poolPath poolAssignList))
         | (KW_UNSET KW_SCHEDULING_POLICY -> ^(TOK_ALTER_POOL $rpName poolPath ^(TOK_SCHEDULING_POLICY TOK_NULL)))
-        | (KW_ADD KW_TRIGGER triggerName=identifier
-            -> ^(TOK_ALTER_POOL $rpName poolPath ^(TOK_ADD_TRIGGER $triggerName)))
-        | (KW_DROP KW_TRIGGER triggerName=identifier
-            -> ^(TOK_ALTER_POOL $rpName poolPath ^(TOK_DROP_TRIGGER $triggerName)))
+        | (KW_ADD KW_TRIGGER triggerName=identifier -> ^(TOK_ALTER_POOL_ADD_TRIGGER $rpName poolPath $triggerName))
+        | (KW_DROP KW_TRIGGER triggerName=identifier -> ^(TOK_ALTER_POOL_DROP_TRIGGER $rpName poolPath $triggerName))
       )
     ;
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 4f95c51..45dec5a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -141,19 +141,6 @@ public final class SemanticAnalyzerFactory {
       case HiveParser.TOK_UNLOCKTABLE:
       case HiveParser.TOK_TRUNCATETABLE:
       case HiveParser.TOK_CACHE_METADATA:
-      case HiveParser.TOK_CREATE_RP:
-      case HiveParser.TOK_SHOW_RP:
-      case HiveParser.TOK_ALTER_RP:
-      case HiveParser.TOK_DROP_RP:
-      case HiveParser.TOK_CREATE_TRIGGER:
-      case HiveParser.TOK_ALTER_TRIGGER:
-      case HiveParser.TOK_DROP_TRIGGER:
-      case HiveParser.TOK_CREATE_POOL:
-      case HiveParser.TOK_ALTER_POOL:
-      case HiveParser.TOK_DROP_POOL:
-      case HiveParser.TOK_CREATE_MAPPING:
-      case HiveParser.TOK_ALTER_MAPPING:
-      case HiveParser.TOK_DROP_MAPPING:
         return new DDLSemanticAnalyzer(queryState);
 
       case HiveParser.TOK_ANALYZE:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index f1ab99e..193eff9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -186,13 +186,16 @@ public enum HiveOperation {
   KILL_QUERY("KILL QUERY", HiveParser.TOK_KILL_QUERY, null, null),
   CREATE_RESOURCEPLAN("CREATE RESOURCEPLAN", HiveParser.TOK_CREATE_RP, null, null, false, false),
   SHOW_RESOURCEPLAN("SHOW RESOURCEPLAN", HiveParser.TOK_SHOW_RP, null, null, false, false),
-  ALTER_RESOURCEPLAN("ALTER RESOURCEPLAN", HiveParser.TOK_ALTER_RP, null, null, false, false),
+  ALTER_RESOURCEPLAN("ALTER RESOURCEPLAN", new int[] {HiveParser.TOK_ALTER_RP_VALIDATE, HiveParser.TOK_ALTER_RP_RENAME,
+      HiveParser.TOK_ALTER_RP_SET, HiveParser.TOK_ALTER_RP_UNSET, HiveParser.TOK_ALTER_RP_ENABLE,
+      HiveParser.TOK_ALTER_RP_DISABLE, HiveParser.TOK_ALTER_RP_REPLACE}, null, null, false, false),
   DROP_RESOURCEPLAN("DROP RESOURCEPLAN", HiveParser.TOK_DROP_RP, null, null, false, false),
   CREATE_TRIGGER("CREATE TRIGGER", HiveParser.TOK_CREATE_TRIGGER, null, null, false, false),
   ALTER_TRIGGER("ALTER TRIGGER", HiveParser.TOK_ALTER_TRIGGER, null, null, false, false),
   DROP_TRIGGER("DROP TRIGGER", HiveParser.TOK_DROP_TRIGGER, null, null, false, false),
   CREATE_POOL("CREATE POOL", HiveParser.TOK_CREATE_POOL, null, null, false, false),
-  ALTER_POOL("ALTER POOL", HiveParser.TOK_ALTER_POOL, null, null, false, false),
+  ALTER_POOL("ALTER POOL", new int[] {HiveParser.TOK_ALTER_POOL, HiveParser.TOK_ALTER_POOL_ADD_TRIGGER,
+      HiveParser.TOK_ALTER_POOL_DROP_TRIGGER}, null, null, false, false),
   DROP_POOL("DROP POOL", HiveParser.TOK_DROP_POOL, null, null, false, false),
   CREATE_MAPPING("CREATE MAPPING", HiveParser.TOK_CREATE_MAPPING, null, null, false, false),
   ALTER_MAPPING("ALTER MAPPING", HiveParser.TOK_ALTER_MAPPING, null, null, false, false),
diff --git a/ql/src/test/queries/clientpositive/resourceplan.q b/ql/src/test/queries/clientpositive/resourceplan.q
index 8bc5697..0466b91 100644
--- a/ql/src/test/queries/clientpositive/resourceplan.q
+++ b/ql/src/test/queries/clientpositive/resourceplan.q
@@ -22,6 +22,10 @@ source ../../metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql;
 SHOW RESOURCE PLANS;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
+-- Show how ENABLE WORKLOAD MANAGEMENT does not work :)
+EXPLAIN ENABLE WORKLOAD MANAGEMENT;
+ENABLE WORKLOAD MANAGEMENT;
+
 -- Create and show plan_1.
 CREATE RESOURCE PLAN plan_1;
 EXPLAIN SHOW RESOURCE PLANS;
@@ -115,6 +119,7 @@ ALTER RESOURCE PLAN plan_3 DISABLE;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
 -- DISABLE WM - ok.
+EXPLAIN DISABLE WORKLOAD MANAGEMENT;
 DISABLE WORKLOAD MANAGEMENT;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
@@ -262,6 +267,7 @@ CREATE POOL plan_2.default.c2 WITH
     QUERY_PARALLELISM=2, SCHEDULING_POLICY='fair', ALLOC_FRACTION=0.75;
 
 -- Cannot activate c1 + c2 = 1.0
+EXPLAIN ALTER RESOURCE PLAN plan_2 VALIDATE;
 ALTER RESOURCE PLAN plan_2 VALIDATE;
 ALTER RESOURCE PLAN plan_2 ENABLE ACTIVATE;
 
@@ -316,7 +322,16 @@ SELECT * FROM SYS.WM_POOLS;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
 -- Changed default pool, now it should work.
-ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool;
+EXPLAIN ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=2;
+ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=2;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+EXPLAIN ALTER RESOURCE PLAN `table` UNSET DEFAULT POOL, QUERY_PARALLELISM;
+ALTER RESOURCE PLAN `table` UNSET DEFAULT POOL, QUERY_PARALLELISM;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=1;
+
 DROP POOL `table`.default;
 SELECT * FROM SYS.WM_POOLS;
 
@@ -439,6 +454,7 @@ SELECT * FROM SYS.WM_TRIGGERS;
 SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS;
 SELECT * FROM SYS.WM_MAPPINGS;
 
+EXPLAIN REPLACE RESOURCE PLAN plan_4a WITH plan_4b;
 REPLACE RESOURCE PLAN plan_4a WITH plan_4b;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 SELECT * FROM SYS.WM_POOLS;
@@ -447,6 +463,7 @@ REPLACE ACTIVE RESOURCE PLAN WITH plan_4a;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 CREATE RESOURCE PLAN plan_4a LIKE plan_4;
 CREATE POOL plan_4a.pool3 WITH SCHEDULING_POLICY='fair', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.0;
+EXPLAIN ALTER RESOURCE PLAN plan_4a ENABLE ACTIVATE WITH REPLACE;
 ALTER RESOURCE PLAN plan_4a ENABLE ACTIVATE WITH REPLACE;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 SELECT * FROM SYS.WM_POOLS;
diff --git a/ql/src/test/results/clientpositive/llap/resourceplan.q.out b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
index a3b6956..5752d40 100644
--- a/ql/src/test/results/clientpositive/llap/resourceplan.q.out
+++ b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
@@ -3588,6 +3588,19 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
+Activate a resource plan to enable workload management!
+PREHOOK: query: EXPLAIN ENABLE WORKLOAD MANAGEMENT
+PREHOOK: type: ALTER RESOURCEPLAN
+POSTHOOK: query: EXPLAIN ENABLE WORKLOAD MANAGEMENT
+POSTHOOK: type: ALTER RESOURCEPLAN
+STAGE DEPENDENCIES:
+
+STAGE PLANS:
+Activate a resource plan to enable workload management!
+PREHOOK: query: ENABLE WORKLOAD MANAGEMENT
+PREHOOK: type: ALTER RESOURCEPLAN
+POSTHOOK: query: ENABLE WORKLOAD MANAGEMENT
+POSTHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: query: CREATE RESOURCE PLAN plan_1
 PREHOOK: type: CREATE RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3630,7 +3643,7 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Show Resource plans
-      resourcePlanName: plan_1
+      Resource plan name: plan_1
 
   Stage: Stage-1
     Fetch Operator
@@ -3665,9 +3678,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-    Create ResourcePlan
-      planName: plan_2
-      queryParallelism: 5
+    Create Resource plan
+      Query parallelism: 5
+      Resource plan name: plan_2
 
 PREHOOK: query: CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM=5
 PREHOOK: type: CREATE RESOURCEPLAN
@@ -3684,10 +3697,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-    Alter Resource plans
-      Resource plan to modify: plan_2
-      Resource plan changed fields:
-      shouldValidate: false
+    Alter Resource plan Set
+      Query parallelism: 10
+      Resource plan name: plan_2
 
 PREHOOK: query: ALTER RESOURCE PLAN plan_2 SET QUERY_PARALLELISM=10
 PREHOOK: type: ALTER RESOURCEPLAN
@@ -3798,10 +3810,10 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-    Alter Resource plans
-      Resource plan to modify: plan_3
-      Resource plan changed fields:
-      shouldValidate: false
+    Alter Resource plan Set
+      Default pool: default1
+      Query parallelism: 30
+      Resource plan name: plan_3
 
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30, DEFAULT POOL = default1
 PREHOOK: type: ALTER RESOURCEPLAN
@@ -3827,10 +3839,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-    Alter Resource plans
-      Resource plan to modify: plan_3
-      Resource plan changed fields:
-      shouldValidate: false
+    Enable Resource plan
+      Resource plan name: plan_3
+      Enable: true
 
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ENABLE
 PREHOOK: type: ALTER RESOURCEPLAN
@@ -3847,10 +3858,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-    Alter Resource plans
-      Resource plan to modify: plan_3
-      Resource plan changed fields:
-      shouldValidate: false
+    Rename Resource plan
+      New resource plan name: plan_4
+      Resource plan name: plan_3
 
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 RENAME TO plan_4
 PREHOOK: type: ALTER RESOURCEPLAN
@@ -3977,6 +3987,18 @@ POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2	default	DISABLED	10	default
 plan_3	default	ACTIVE	NULL	default
+PREHOOK: query: EXPLAIN DISABLE WORKLOAD MANAGEMENT
+PREHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: EXPLAIN DISABLE WORKLOAD MANAGEMENT
+POSTHOOK: type: ALTER RESOURCEPLAN
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Disable Resource plan
+
 PREHOOK: query: DISABLE WORKLOAD MANAGEMENT
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -4082,8 +4104,8 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
-    Drop Resource plans
-      resourcePlanName: plan_2
+    Drop Resource plan
+      Resource plan name: plan_2
 
 PREHOOK: query: DROP RESOURCE PLAN plan_2
 PREHOOK: type: DROP RESOURCEPLAN
@@ -4180,7 +4202,10 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Create WM Trigger
-      trigger:
+      Action expression: KILL
+      Resource plan name: plan_1
+      Trigger expression: BYTES_READ > '10kb'
+      Trigger name: trigger_1
 
 PREHOOK: query: CREATE TRIGGER plan_1.trigger_1 WHEN BYTES_READ > '10kb' DO KILL
 PREHOOK: type: CREATE TRIGGER
@@ -4244,7 +4269,10 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Alter WM Trigger
-      trigger:
+      Action expression: KILL
+      Resource plan name: plan_1
+      Trigger expression: BYTES_READ > '1GB'
+      Trigger name: trigger_1
 
 PREHOOK: query: ALTER TRIGGER plan_1.trigger_1 WHEN BYTES_READ > '1GB' DO KILL
 PREHOOK: type: ALTER TRIGGER
@@ -4272,8 +4300,8 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Drop WM Trigger
-      resourcePlanName: plan_1
-      triggerName: trigger_1
+      Resource plan name: plan_1
+      Trigger name: trigger_1
 
 PREHOOK: query: DROP TRIGGER plan_1.trigger_1
 PREHOOK: type: DROP TRIGGER
@@ -4435,7 +4463,11 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Create Pool
-      pool:
+      Alloc fraction: 1.0
+      Pool path: default
+      Query parallelism: 5
+      Resource plan name: plan_1
+      Scheduling policy: default
 
 PREHOOK: query: CREATE POOL plan_1.default WITH
    ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default'
@@ -4459,7 +4491,11 @@ POSTHOOK: Input: sys@wm_pools
 plan_1	default	default	1.0	4	NULL
 plan_2	default	default	1.0	5	NULL
 table	default	default	1.0	4	NULL
-FAILED: SemanticException Invalid scheduling policy invalid
+PREHOOK: query: CREATE POOL plan_2.default.c1 WITH
+    ALLOC_FRACTION=0.3, QUERY_PARALLELISM=3, SCHEDULING_POLICY='invalid'
+PREHOOK: type: CREATE POOL
+PREHOOK: Output: dummyHostnameForTest
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Invalid scheduling policy invalid
 PREHOOK: query: CREATE POOL plan_2.default.c1 WITH
     ALLOC_FRACTION=0.3, QUERY_PARALLELISM=3, SCHEDULING_POLICY='fair'
 PREHOOK: type: CREATE POOL
@@ -4474,6 +4510,26 @@ PREHOOK: Output: dummyHostnameForTest
 POSTHOOK: query: CREATE POOL plan_2.default.c2 WITH
     QUERY_PARALLELISM=2, SCHEDULING_POLICY='fair', ALLOC_FRACTION=0.75
 POSTHOOK: type: CREATE POOL
+PREHOOK: query: EXPLAIN ALTER RESOURCE PLAN plan_2 VALIDATE
+PREHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: EXPLAIN ALTER RESOURCE PLAN plan_2 VALIDATE
+POSTHOOK: type: ALTER RESOURCEPLAN
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+    Validate Resource Plan
+      Resource plan name: plan_2
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: ALTER RESOURCE PLAN plan_2 VALIDATE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -4495,8 +4551,10 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Alter Pool
-      pool:
-      poolPath: default.c2
+      Alloc fraction: 0.7
+      Pool path: default.c2
+      Query parallelism: 1
+      Resource plan name: plan_2
 
 PREHOOK: query: ALTER POOL plan_2.default.c2 SET ALLOC_FRACTION = 0.7, QUERY_PARALLELISM = 1
 PREHOOK: type: ALTER POOL
@@ -4589,7 +4647,8 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Drop WM Pool
-      poolName: plan_2
+      Pool path: default
+      Resource plan name: plan_2
 
 PREHOOK: query: DROP POOL plan_2.default
 PREHOOK: type: DROP POOL
@@ -4738,10 +4797,72 @@ POSTHOOK: Input: sys@wm_resourceplans
 plan_1	default	ACTIVE	NULL	default
 plan_2	default	DISABLED	10	def
 table	default	DISABLED	1	default
-PREHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool
+PREHOOK: query: EXPLAIN ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=2
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
-POSTHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool
+POSTHOOK: query: EXPLAIN ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=2
+POSTHOOK: type: ALTER RESOURCEPLAN
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Alter Resource plan Set
+      Default pool: table.pool
+      Query parallelism: 2
+      Resource plan name: table
+
+PREHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=2
+PREHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=2
+POSTHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+plan_1	default	ACTIVE	NULL	default
+plan_2	default	DISABLED	10	def
+table	default	DISABLED	2	table.pool
+PREHOOK: query: EXPLAIN ALTER RESOURCE PLAN `table` UNSET DEFAULT POOL, QUERY_PARALLELISM
+PREHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: EXPLAIN ALTER RESOURCE PLAN `table` UNSET DEFAULT POOL, QUERY_PARALLELISM
+POSTHOOK: type: ALTER RESOURCEPLAN
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Alter Resource plans
+      Resource plan name: table
+      Unset Default Pool: true
+      Unset Query parallelism: true
+
+PREHOOK: query: ALTER RESOURCE PLAN `table` UNSET DEFAULT POOL, QUERY_PARALLELISM
+PREHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: ALTER RESOURCE PLAN `table` UNSET DEFAULT POOL, QUERY_PARALLELISM
+POSTHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+plan_1	default	ACTIVE	NULL	default
+plan_2	default	DISABLED	10	def
+table	default	DISABLED	NULL	NULL
+PREHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=1
+PREHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=1
 POSTHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: query: DROP POOL `table`.default
 PREHOOK: type: DROP POOL
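
The new test above round-trips the plan-level properties: SET writes QUERY_PARALLELISM
and the default pool into the WM_RESOURCEPLANS row for `table`, and UNSET reverts both
columns to NULL in one statement. The pair, as exercised:

    ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool, QUERY_PARALLELISM=2;
    ALTER RESOURCE PLAN `table` UNSET DEFAULT POOL, QUERY_PARALLELISM;
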
@@ -4791,8 +4912,8 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Create Trigger to pool mappings
-      resourcePlanName: plan_2
       Pool path: def.c1
+      Resource plan name: plan_2
       Trigger name: trigger_1
 
 PREHOOK: query: ALTER POOL plan_2.def.c1 ADD TRIGGER trigger_1
@@ -4927,8 +5048,8 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Drop Trigger to pool mappings
-      resourcePlanName: plan_2
       Pool path: def.c1
+      Resource plan name: plan_2
       Trigger name: trigger_1
 
 PREHOOK: query: ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_1
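
Trigger-to-pool wiring goes through ALTER POOL rather than a separate statement; the
refactored EXPLAIN output prints the pool path, resource plan name and trigger name for
both the add and the drop direction. The pair exercised above:

    ALTER POOL plan_2.def.c1 ADD TRIGGER trigger_1;
    ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_1;
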
@@ -4971,7 +5092,10 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Create Mapping
-      mapping:
+      Entity name: user1
+      Entity type: USER
+      Pool path: def
+      Resource plan name: plan_2
 
 PREHOOK: query: CREATE USER MAPPING "user1" IN plan_2 TO def
 PREHOOK: type: CREATE MAPPING
@@ -5009,7 +5133,10 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Create Mapping
-      mapping:
+      Entity name: group3
+      Entity type: GROUP
+      Ordering: 1
+      Resource plan name: plan_2
 
 PREHOOK: query: CREATE GROUP MAPPING 'group3' IN plan_2 UNMANAGED WITH ORDER 1
 PREHOOK: type: CREATE MAPPING
@@ -5027,7 +5154,9 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Alter Mapping
-      mapping:
+      Entity name: user1
+      Entity type: USER
+      Resource plan name: plan_2
 
 PREHOOK: query: ALTER USER MAPPING "user1" IN plan_2 UNMANAGED
 PREHOOK: type: ALTER MAPPING
@@ -5081,7 +5210,9 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Drop mapping
-      mapping:
+      Entity name: user2
+      Entity type: USER
+      Resource plan name: plan_2
 
 PREHOOK: query: DROP USER MAPPING "user2" in plan_2
 PREHOOK: type: DROP MAPPING
@@ -5099,7 +5230,9 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-0
     Drop mapping
-      mapping:
+      Entity name: group2
+      Entity type: GROUP
+      Resource plan name: plan_2
 
 PREHOOK: query: DROP GROUP MAPPING "group2" in plan_2
 PREHOOK: type: DROP MAPPING
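
The mapping hunks above cover the full lifecycle handled by the new mapping analyzers:
CREATE routes a user or group either to a pool or to UNMANAGED (optionally WITH ORDER n
to set precedence), ALTER rewrites the routing, and DROP removes it; in each case the
EXPLAIN stage now spells out the entity name, entity type, pool path or ordering, and
resource plan name in place of the former empty "mapping:" field. The statements
exercised, collected:

    CREATE USER MAPPING "user1" IN plan_2 TO def;
    CREATE GROUP MAPPING 'group3' IN plan_2 UNMANAGED WITH ORDER 1;
    ALTER USER MAPPING "user1" IN plan_2 UNMANAGED;
    DROP USER MAPPING "user2" IN plan_2;
    DROP GROUP MAPPING "group2" IN plan_2;
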
@@ -5325,6 +5458,20 @@ POSTHOOK: Input: sys@wm_mappings
 #### A masked pattern was here ####
 plan_4a	default	USER	user1	pool1	0
 plan_4b	default	USER	user1	pool1	0
+PREHOOK: query: EXPLAIN REPLACE RESOURCE PLAN plan_4a WITH plan_4b
+PREHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: EXPLAIN REPLACE RESOURCE PLAN plan_4a WITH plan_4b
+POSTHOOK: type: ALTER RESOURCEPLAN
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Replace Resource plan
+      Destination Resource plan name: plan_4a
+      Resource plan name: plan_4b
+
 PREHOOK: query: REPLACE RESOURCE PLAN plan_4a WITH plan_4b
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
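
Per the EXPLAIN labels above, plan_4a is the destination of the replace and plan_4b is
the plan whose definition is moved over it:

    -- the definition of plan_4b replaces that of plan_4a (per the Destination label)
    REPLACE RESOURCE PLAN plan_4a WITH plan_4b;
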
@@ -5404,6 +5551,22 @@ PREHOOK: type: CREATE POOL
 PREHOOK: Output: dummyHostnameForTest
 POSTHOOK: query: CREATE POOL plan_4a.pool3 WITH SCHEDULING_POLICY='fair', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.0
 POSTHOOK: type: CREATE POOL
+PREHOOK: query: EXPLAIN ALTER RESOURCE PLAN plan_4a ENABLE ACTIVATE WITH REPLACE
+PREHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: EXPLAIN ALTER RESOURCE PLAN plan_4a ENABLE ACTIVATE WITH REPLACE
+POSTHOOK: type: ALTER RESOURCEPLAN
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Enable Resource plan
+      Resource plan name: plan_4a
+      Activate: true
+      Enable: true
+      Replace: true
+
 PREHOOK: query: ALTER RESOURCE PLAN plan_4a ENABLE ACTIVATE WITH REPLACE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
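
ENABLE ACTIVATE WITH REPLACE folds three status transitions into one statement, and the
Stage-0 output above reports each as its own flag (Enable, Activate, Replace). As
exercised:

    -- enable the plan, make it active, replacing the previously active plan
    ALTER RESOURCE PLAN plan_4a ENABLE ACTIVATE WITH REPLACE;
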
diff --git a/ql/src/test/results/clientpositive/resourceplan.q.out b/ql/src/test/results/clientpositive/resourceplan.q.out
deleted file mode 100644
index bf79d75..0000000
--- a/ql/src/test/results/clientpositive/resourceplan.q.out
+++ /dev/null
@@ -1,5440 +0,0 @@
-PREHOOK: query: show grant user hive_test_user
-PREHOOK: type: SHOW_GRANT
-POSTHOOK: query: show grant user hive_test_user
-POSTHOOK: type: SHOW_GRANT
-default	alltypesorc			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	alltypesorc			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	alltypesorc			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	alltypesorc			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	alltypesparquet			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	alltypesparquet			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	alltypesparquet			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	alltypesparquet			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	cbo_t1			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	cbo_t1			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	cbo_t1			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	cbo_t1			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	cbo_t2			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	cbo_t2			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	cbo_t2			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	cbo_t2			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	cbo_t3			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	cbo_t3			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	cbo_t3			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	cbo_t3			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	lineitem			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	lineitem			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	lineitem			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	lineitem			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	part			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	part			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	part			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	part			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	src			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	src			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	src			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	src			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	src1			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	src1			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	src1			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	src1			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	src_cbo			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	src_cbo			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	src_cbo			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	src_cbo			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	src_json			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	src_json			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	src_json			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	src_json			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	src_sequencefile			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	src_sequencefile			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	src_sequencefile			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	src_sequencefile			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	src_thrift			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	src_thrift			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	src_thrift			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	src_thrift			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	srcbucket			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	srcbucket			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	srcbucket			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	srcbucket			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	srcbucket2			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	srcbucket2			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	srcbucket2			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	srcbucket2			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-default	srcpart			hive_test_user	USER	DELETE	true	-1	hive_test_user
-default	srcpart			hive_test_user	USER	INSERT	true	-1	hive_test_user
-default	srcpart			hive_test_user	USER	SELECT	true	-1	hive_test_user
-default	srcpart			hive_test_user	USER	UPDATE	true	-1	hive_test_user
-PREHOOK: query: CREATE DATABASE IF NOT EXISTS SYS
-PREHOOK: type: CREATEDATABASE
-PREHOOK: Output: database:SYS
-POSTHOOK: query: CREATE DATABASE IF NOT EXISTS SYS
-POSTHOOK: type: CREATEDATABASE
-POSTHOOK: Output: database:SYS
-PREHOOK: query: USE SYS
-PREHOOK: type: SWITCHDATABASE
-PREHOOK: Input: database:sys
-POSTHOOK: query: USE SYS
-POSTHOOK: type: SWITCHDATABASE
-POSTHOOK: Input: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `BUCKETING_COLS` (
-  `SD_ID` bigint,
-  `BUCKET_COL_NAME` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_BUCKETING_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"BUCKET_COL_NAME\",
-  \"INTEGER_IDX\"
-FROM
-  \"BUCKETING_COLS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@BUCKETING_COLS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `BUCKETING_COLS` (
-  `SD_ID` bigint,
-  `BUCKET_COL_NAME` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_BUCKETING_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"BUCKET_COL_NAME\",
-  \"INTEGER_IDX\"
-FROM
-  \"BUCKETING_COLS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@BUCKETING_COLS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `CDS` (
-  `CD_ID` bigint,
-  CONSTRAINT `SYS_PK_CDS` PRIMARY KEY (`CD_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"CD_ID\"
-FROM
-  \"CDS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@CDS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `CDS` (
-  `CD_ID` bigint,
-  CONSTRAINT `SYS_PK_CDS` PRIMARY KEY (`CD_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"CD_ID\"
-FROM
-  \"CDS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@CDS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `COLUMNS_V2` (
-  `CD_ID` bigint,
-  `COMMENT` string,
-  `COLUMN_NAME` string,
-  `TYPE_NAME` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_COLUMN_V2` PRIMARY KEY (`CD_ID`,`COLUMN_NAME`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"CD_ID\",
-  \"COMMENT\",
-  \"COLUMN_NAME\",
-  \"TYPE_NAME\",
-  \"INTEGER_IDX\"
-FROM
-  \"COLUMNS_V2\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@COLUMNS_V2
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `COLUMNS_V2` (
-  `CD_ID` bigint,
-  `COMMENT` string,
-  `COLUMN_NAME` string,
-  `TYPE_NAME` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_COLUMN_V2` PRIMARY KEY (`CD_ID`,`COLUMN_NAME`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"CD_ID\",
-  \"COMMENT\",
-  \"COLUMN_NAME\",
-  \"TYPE_NAME\",
-  \"INTEGER_IDX\"
-FROM
-  \"COLUMNS_V2\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@COLUMNS_V2
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `DATABASE_PARAMS` (
-  `DB_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_DATABASE_PARAMS` PRIMARY KEY (`DB_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"DB_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"DATABASE_PARAMS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@DATABASE_PARAMS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `DATABASE_PARAMS` (
-  `DB_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_DATABASE_PARAMS` PRIMARY KEY (`DB_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"DB_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"DATABASE_PARAMS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@DATABASE_PARAMS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `DBS` (
-  `DB_ID` bigint,
-  `DB_LOCATION_URI` string,
-  `NAME` string,
-  `OWNER_NAME` string,
-  `OWNER_TYPE` string,
-  CONSTRAINT `SYS_PK_DBS` PRIMARY KEY (`DB_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"DB_ID\",
-  \"DB_LOCATION_URI\",
-  \"NAME\",
-  \"OWNER_NAME\",
-  \"OWNER_TYPE\"
-FROM
-  \"DBS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@DBS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `DBS` (
-  `DB_ID` bigint,
-  `DB_LOCATION_URI` string,
-  `NAME` string,
-  `OWNER_NAME` string,
-  `OWNER_TYPE` string,
-  CONSTRAINT `SYS_PK_DBS` PRIMARY KEY (`DB_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"DB_ID\",
-  \"DB_LOCATION_URI\",
-  \"NAME\",
-  \"OWNER_NAME\",
-  \"OWNER_TYPE\"
-FROM
-  \"DBS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@DBS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `DB_PRIVS` (
-  `DB_GRANT_ID` bigint,
-  `CREATE_TIME` int,
-  `DB_ID` bigint,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `DB_PRIV` string,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_DB_PRIVS` PRIMARY KEY (`DB_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"DB_GRANT_ID\",
-  \"CREATE_TIME\",
-  \"DB_ID\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"DB_PRIV\",
-  \"AUTHORIZER\"
-FROM
-  \"DB_PRIVS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@DB_PRIVS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `DB_PRIVS` (
-  `DB_GRANT_ID` bigint,
-  `CREATE_TIME` int,
-  `DB_ID` bigint,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `DB_PRIV` string,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_DB_PRIVS` PRIMARY KEY (`DB_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"DB_GRANT_ID\",
-  \"CREATE_TIME\",
-  \"DB_ID\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"DB_PRIV\",
-  \"AUTHORIZER\"
-FROM
-  \"DB_PRIVS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@DB_PRIVS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `GLOBAL_PRIVS` (
-  `USER_GRANT_ID` bigint,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` string,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `USER_PRIV` string,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_GLOBAL_PRIVS` PRIMARY KEY (`USER_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"USER_GRANT_ID\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"USER_PRIV\",
-  \"AUTHORIZER\"
-FROM
-  \"GLOBAL_PRIVS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@GLOBAL_PRIVS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `GLOBAL_PRIVS` (
-  `USER_GRANT_ID` bigint,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` string,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `USER_PRIV` string,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_GLOBAL_PRIVS` PRIMARY KEY (`USER_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"USER_GRANT_ID\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"USER_PRIV\",
-  \"AUTHORIZER\"
-FROM
-  \"GLOBAL_PRIVS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@GLOBAL_PRIVS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PARTITIONS` (
-  `PART_ID` bigint,
-  `CREATE_TIME` int,
-  `LAST_ACCESS_TIME` int,
-  `PART_NAME` string,
-  `SD_ID` bigint,
-  `TBL_ID` bigint,
-  CONSTRAINT `SYS_PK_PARTITIONS` PRIMARY KEY (`PART_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_ID\",
-  \"CREATE_TIME\",
-  \"LAST_ACCESS_TIME\",
-  \"PART_NAME\",
-  \"SD_ID\",
-  \"TBL_ID\"
-FROM
-  \"PARTITIONS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@PARTITIONS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PARTITIONS` (
-  `PART_ID` bigint,
-  `CREATE_TIME` int,
-  `LAST_ACCESS_TIME` int,
-  `PART_NAME` string,
-  `SD_ID` bigint,
-  `TBL_ID` bigint,
-  CONSTRAINT `SYS_PK_PARTITIONS` PRIMARY KEY (`PART_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_ID\",
-  \"CREATE_TIME\",
-  \"LAST_ACCESS_TIME\",
-  \"PART_NAME\",
-  \"SD_ID\",
-  \"TBL_ID\"
-FROM
-  \"PARTITIONS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@PARTITIONS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PARTITION_KEYS` (
-  `TBL_ID` bigint,
-  `PKEY_COMMENT` string,
-  `PKEY_NAME` string,
-  `PKEY_TYPE` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_PARTITION_KEYS` PRIMARY KEY (`TBL_ID`,`PKEY_NAME`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_ID\",
-  \"PKEY_COMMENT\",
-  \"PKEY_NAME\",
-  \"PKEY_TYPE\",
-  \"INTEGER_IDX\"
-FROM
-  \"PARTITION_KEYS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@PARTITION_KEYS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PARTITION_KEYS` (
-  `TBL_ID` bigint,
-  `PKEY_COMMENT` string,
-  `PKEY_NAME` string,
-  `PKEY_TYPE` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_PARTITION_KEYS` PRIMARY KEY (`TBL_ID`,`PKEY_NAME`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_ID\",
-  \"PKEY_COMMENT\",
-  \"PKEY_NAME\",
-  \"PKEY_TYPE\",
-  \"INTEGER_IDX\"
-FROM
-  \"PARTITION_KEYS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@PARTITION_KEYS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PARTITION_KEY_VALS` (
-  `PART_ID` bigint,
-  `PART_KEY_VAL` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_PARTITION_KEY_VALS` PRIMARY KEY (`PART_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_ID\",
-  \"PART_KEY_VAL\",
-  \"INTEGER_IDX\"
-FROM
-  \"PARTITION_KEY_VALS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@PARTITION_KEY_VALS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PARTITION_KEY_VALS` (
-  `PART_ID` bigint,
-  `PART_KEY_VAL` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_PARTITION_KEY_VALS` PRIMARY KEY (`PART_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_ID\",
-  \"PART_KEY_VAL\",
-  \"INTEGER_IDX\"
-FROM
-  \"PARTITION_KEY_VALS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@PARTITION_KEY_VALS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PARTITION_PARAMS` (
-  `PART_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_PARTITION_PARAMS` PRIMARY KEY (`PART_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"PARTITION_PARAMS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@PARTITION_PARAMS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PARTITION_PARAMS` (
-  `PART_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_PARTITION_PARAMS` PRIMARY KEY (`PART_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"PARTITION_PARAMS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@PARTITION_PARAMS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PART_COL_PRIVS` (
-  `PART_COLUMN_GRANT_ID` bigint,
-  `COLUMN_NAME` string,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PART_ID` bigint,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `PART_COL_PRIV` string,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_PART_COL_PRIVS` PRIMARY KEY (`PART_COLUMN_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_COLUMN_GRANT_ID\",
-  \"COLUMN_NAME\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PART_ID\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"PART_COL_PRIV\",
-  \"AUTHORIZER\"
-FROM
-  \"PART_COL_PRIVS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@PART_COL_PRIVS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PART_COL_PRIVS` (
-  `PART_COLUMN_GRANT_ID` bigint,
-  `COLUMN_NAME` string,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PART_ID` bigint,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `PART_COL_PRIV` string,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_PART_COL_PRIVS` PRIMARY KEY (`PART_COLUMN_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_COLUMN_GRANT_ID\",
-  \"COLUMN_NAME\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PART_ID\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"PART_COL_PRIV\",
-  \"AUTHORIZER\"
-FROM
-  \"PART_COL_PRIVS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@PART_COL_PRIVS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PART_PRIVS` (
-  `PART_GRANT_ID` bigint,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PART_ID` bigint,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `PART_PRIV` string,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_PART_PRIVS` PRIMARY KEY (`PART_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_GRANT_ID\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PART_ID\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"PART_PRIV\",
-  \"AUTHORIZER\"
-FROM
-  \"PART_PRIVS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@PART_PRIVS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PART_PRIVS` (
-  `PART_GRANT_ID` bigint,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PART_ID` bigint,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `PART_PRIV` string,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_PART_PRIVS` PRIMARY KEY (`PART_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"PART_GRANT_ID\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PART_ID\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"PART_PRIV\",
-  \"AUTHORIZER\"
-FROM
-  \"PART_PRIVS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@PART_PRIVS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `ROLES` (
-  `ROLE_ID` bigint,
-  `CREATE_TIME` int,
-  `OWNER_NAME` string,
-  `ROLE_NAME` string,
-  CONSTRAINT `SYS_PK_ROLES` PRIMARY KEY (`ROLE_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"ROLE_ID\",
-  \"CREATE_TIME\",
-  \"OWNER_NAME\",
-  \"ROLE_NAME\"
-FROM
-  \"ROLES\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@ROLES
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `ROLES` (
-  `ROLE_ID` bigint,
-  `CREATE_TIME` int,
-  `OWNER_NAME` string,
-  `ROLE_NAME` string,
-  CONSTRAINT `SYS_PK_ROLES` PRIMARY KEY (`ROLE_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"ROLE_ID\",
-  \"CREATE_TIME\",
-  \"OWNER_NAME\",
-  \"ROLE_NAME\"
-FROM
-  \"ROLES\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@ROLES
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `ROLE_MAP` (
-  `ROLE_GRANT_ID` bigint,
-  `ADD_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `ROLE_ID` bigint,
-  CONSTRAINT `SYS_PK_ROLE_MAP` PRIMARY KEY (`ROLE_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"ROLE_GRANT_ID\",
-  \"ADD_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"ROLE_ID\"
-FROM
-  \"ROLE_MAP\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@ROLE_MAP
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `ROLE_MAP` (
-  `ROLE_GRANT_ID` bigint,
-  `ADD_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `ROLE_ID` bigint,
-  CONSTRAINT `SYS_PK_ROLE_MAP` PRIMARY KEY (`ROLE_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"ROLE_GRANT_ID\",
-  \"ADD_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"ROLE_ID\"
-FROM
-  \"ROLE_MAP\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@ROLE_MAP
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SDS` (
-  `SD_ID` bigint,
-  `CD_ID` bigint,
-  `INPUT_FORMAT` string,
-  `IS_COMPRESSED` boolean,
-  `IS_STOREDASSUBDIRECTORIES` boolean,
-  `LOCATION` string,
-  `NUM_BUCKETS` int,
-  `OUTPUT_FORMAT` string,
-  `SERDE_ID` bigint,
-  CONSTRAINT `SYS_PK_SDS` PRIMARY KEY (`SD_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"CD_ID\",
-  \"INPUT_FORMAT\",
-  \"IS_COMPRESSED\",
-  \"IS_STOREDASSUBDIRECTORIES\",
-  \"LOCATION\",
-  \"NUM_BUCKETS\",
-  \"OUTPUT_FORMAT\",
-  \"SERDE_ID\"
-FROM
-  \"SDS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SDS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SDS` (
-  `SD_ID` bigint,
-  `CD_ID` bigint,
-  `INPUT_FORMAT` string,
-  `IS_COMPRESSED` boolean,
-  `IS_STOREDASSUBDIRECTORIES` boolean,
-  `LOCATION` string,
-  `NUM_BUCKETS` int,
-  `OUTPUT_FORMAT` string,
-  `SERDE_ID` bigint,
-  CONSTRAINT `SYS_PK_SDS` PRIMARY KEY (`SD_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"CD_ID\",
-  \"INPUT_FORMAT\",
-  \"IS_COMPRESSED\",
-  \"IS_STOREDASSUBDIRECTORIES\",
-  \"LOCATION\",
-  \"NUM_BUCKETS\",
-  \"OUTPUT_FORMAT\",
-  \"SERDE_ID\"
-FROM
-  \"SDS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SDS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SD_PARAMS` (
-  `SD_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_SD_PARAMS` PRIMARY KEY (`SD_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"SD_PARAMS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SD_PARAMS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SD_PARAMS` (
-  `SD_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_SD_PARAMS` PRIMARY KEY (`SD_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"SD_PARAMS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SD_PARAMS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SEQUENCE_TABLE` (
-  `SEQUENCE_NAME` string,
-  `NEXT_VAL` bigint,
-  CONSTRAINT `SYS_PK_SEQUENCE_TABLE` PRIMARY KEY (`SEQUENCE_NAME`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SEQUENCE_NAME\",
-  \"NEXT_VAL\"
-FROM
-  \"SEQUENCE_TABLE\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SEQUENCE_TABLE
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SEQUENCE_TABLE` (
-  `SEQUENCE_NAME` string,
-  `NEXT_VAL` bigint,
-  CONSTRAINT `SYS_PK_SEQUENCE_TABLE` PRIMARY KEY (`SEQUENCE_NAME`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SEQUENCE_NAME\",
-  \"NEXT_VAL\"
-FROM
-  \"SEQUENCE_TABLE\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SEQUENCE_TABLE
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SERDES` (
-  `SERDE_ID` bigint,
-  `NAME` string,
-  `SLIB` string,
-  CONSTRAINT `SYS_PK_SERDES` PRIMARY KEY (`SERDE_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SERDE_ID\",
-  \"NAME\",
-  \"SLIB\"
-FROM
-  \"SERDES\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SERDES
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SERDES` (
-  `SERDE_ID` bigint,
-  `NAME` string,
-  `SLIB` string,
-  CONSTRAINT `SYS_PK_SERDES` PRIMARY KEY (`SERDE_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SERDE_ID\",
-  \"NAME\",
-  \"SLIB\"
-FROM
-  \"SERDES\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SERDES
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SERDE_PARAMS` (
-  `SERDE_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_SERDE_PARAMS` PRIMARY KEY (`SERDE_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SERDE_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"SERDE_PARAMS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SERDE_PARAMS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SERDE_PARAMS` (
-  `SERDE_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_SERDE_PARAMS` PRIMARY KEY (`SERDE_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SERDE_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"SERDE_PARAMS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SERDE_PARAMS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_COL_NAMES` (
-  `SD_ID` bigint,
-  `SKEWED_COL_NAME` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_SKEWED_COL_NAMES` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"SKEWED_COL_NAME\",
-  \"INTEGER_IDX\"
-FROM
-  \"SKEWED_COL_NAMES\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SKEWED_COL_NAMES
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_COL_NAMES` (
-  `SD_ID` bigint,
-  `SKEWED_COL_NAME` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_SKEWED_COL_NAMES` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"SKEWED_COL_NAME\",
-  \"INTEGER_IDX\"
-FROM
-  \"SKEWED_COL_NAMES\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SKEWED_COL_NAMES
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_COL_VALUE_LOC_MAP` (
-  `SD_ID` bigint,
-  `STRING_LIST_ID_KID` bigint,
-  `LOCATION` string,
-  CONSTRAINT `SYS_PK_COL_VALUE_LOC_MAP` PRIMARY KEY (`SD_ID`,`STRING_LIST_ID_KID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"STRING_LIST_ID_KID\",
-  \"LOCATION\"
-FROM
-  \"SKEWED_COL_VALUE_LOC_MAP\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SKEWED_COL_VALUE_LOC_MAP
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_COL_VALUE_LOC_MAP` (
-  `SD_ID` bigint,
-  `STRING_LIST_ID_KID` bigint,
-  `LOCATION` string,
-  CONSTRAINT `SYS_PK_COL_VALUE_LOC_MAP` PRIMARY KEY (`SD_ID`,`STRING_LIST_ID_KID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"STRING_LIST_ID_KID\",
-  \"LOCATION\"
-FROM
-  \"SKEWED_COL_VALUE_LOC_MAP\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SKEWED_COL_VALUE_LOC_MAP
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_STRING_LIST` (
-  `STRING_LIST_ID` bigint,
-  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST` PRIMARY KEY (`STRING_LIST_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"STRING_LIST_ID\"
-FROM
-  \"SKEWED_STRING_LIST\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SKEWED_STRING_LIST
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_STRING_LIST` (
-  `STRING_LIST_ID` bigint,
-  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST` PRIMARY KEY (`STRING_LIST_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"STRING_LIST_ID\"
-FROM
-  \"SKEWED_STRING_LIST\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SKEWED_STRING_LIST
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_STRING_LIST_VALUES` (
-  `STRING_LIST_ID` bigint,
-  `STRING_LIST_VALUE` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST_VALUES` PRIMARY KEY (`STRING_LIST_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"STRING_LIST_ID\",
-  \"STRING_LIST_VALUE\",
-  \"INTEGER_IDX\"
-FROM
-  \"SKEWED_STRING_LIST_VALUES\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SKEWED_STRING_LIST_VALUES
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_STRING_LIST_VALUES` (
-  `STRING_LIST_ID` bigint,
-  `STRING_LIST_VALUE` string,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_SKEWED_STRING_LIST_VALUES` PRIMARY KEY (`STRING_LIST_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"STRING_LIST_ID\",
-  \"STRING_LIST_VALUE\",
-  \"INTEGER_IDX\"
-FROM
-  \"SKEWED_STRING_LIST_VALUES\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SKEWED_STRING_LIST_VALUES
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_VALUES` (
-  `SD_ID_OID` bigint,
-  `STRING_LIST_ID_EID` bigint,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_SKEWED_VALUES` PRIMARY KEY (`SD_ID_OID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID_OID\",
-  \"STRING_LIST_ID_EID\",
-  \"INTEGER_IDX\"
-FROM
-  \"SKEWED_VALUES\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SKEWED_VALUES
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SKEWED_VALUES` (
-  `SD_ID_OID` bigint,
-  `STRING_LIST_ID_EID` bigint,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_SKEWED_VALUES` PRIMARY KEY (`SD_ID_OID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID_OID\",
-  \"STRING_LIST_ID_EID\",
-  \"INTEGER_IDX\"
-FROM
-  \"SKEWED_VALUES\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SKEWED_VALUES
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SORT_COLS` (
-  `SD_ID` bigint,
-  `COLUMN_NAME` string,
-  `ORDER` int,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_SORT_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"COLUMN_NAME\",
-  \"ORDER\",
-  \"INTEGER_IDX\"
-FROM
-  \"SORT_COLS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@SORT_COLS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `SORT_COLS` (
-  `SD_ID` bigint,
-  `COLUMN_NAME` string,
-  `ORDER` int,
-  `INTEGER_IDX` int,
-  CONSTRAINT `SYS_PK_SORT_COLS` PRIMARY KEY (`SD_ID`,`INTEGER_IDX`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"SD_ID\",
-  \"COLUMN_NAME\",
-  \"ORDER\",
-  \"INTEGER_IDX\"
-FROM
-  \"SORT_COLS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@SORT_COLS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TABLE_PARAMS` (
-  `TBL_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_TABLE_PARAMS` PRIMARY KEY (`TBL_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"TABLE_PARAMS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@TABLE_PARAMS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TABLE_PARAMS` (
-  `TBL_ID` bigint,
-  `PARAM_KEY` string,
-  `PARAM_VALUE` string,
-  CONSTRAINT `SYS_PK_TABLE_PARAMS` PRIMARY KEY (`TBL_ID`,`PARAM_KEY`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_ID\",
-  \"PARAM_KEY\",
-  \"PARAM_VALUE\"
-FROM
-  \"TABLE_PARAMS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@TABLE_PARAMS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TBLS` (
-  `TBL_ID` bigint,
-  `CREATE_TIME` int,
-  `DB_ID` bigint,
-  `LAST_ACCESS_TIME` int,
-  `OWNER` string,
-  `RETENTION` int,
-  `SD_ID` bigint,
-  `TBL_NAME` string,
-  `TBL_TYPE` string,
-  `VIEW_EXPANDED_TEXT` string,
-  `VIEW_ORIGINAL_TEXT` string,
-  `IS_REWRITE_ENABLED` boolean,
-  CONSTRAINT `SYS_PK_TBLS` PRIMARY KEY (`TBL_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_ID\",
-  \"CREATE_TIME\",
-  \"DB_ID\",
-  \"LAST_ACCESS_TIME\",
-  \"OWNER\",
-  \"RETENTION\",
-  \"SD_ID\",
-  \"TBL_NAME\",
-  \"TBL_TYPE\",
-  \"VIEW_EXPANDED_TEXT\",
-  \"VIEW_ORIGINAL_TEXT\",
-  \"IS_REWRITE_ENABLED\"
-FROM \"TBLS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@TBLS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TBLS` (
-  `TBL_ID` bigint,
-  `CREATE_TIME` int,
-  `DB_ID` bigint,
-  `LAST_ACCESS_TIME` int,
-  `OWNER` string,
-  `RETENTION` int,
-  `SD_ID` bigint,
-  `TBL_NAME` string,
-  `TBL_TYPE` string,
-  `VIEW_EXPANDED_TEXT` string,
-  `VIEW_ORIGINAL_TEXT` string,
-  `IS_REWRITE_ENABLED` boolean,
-  CONSTRAINT `SYS_PK_TBLS` PRIMARY KEY (`TBL_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_ID\",
-  \"CREATE_TIME\",
-  \"DB_ID\",
-  \"LAST_ACCESS_TIME\",
-  \"OWNER\",
-  \"RETENTION\",
-  \"SD_ID\",
-  \"TBL_NAME\",
-  \"TBL_TYPE\",
-  \"VIEW_EXPANDED_TEXT\",
-  \"VIEW_ORIGINAL_TEXT\",
-  \"IS_REWRITE_ENABLED\"
-FROM \"TBLS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@TBLS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `MV_CREATION_METADATA` (
-  `MV_CREATION_METADATA_ID` bigint,
-  `DB_NAME` string,
-  `TBL_NAME` string,
-  `TXN_LIST` string,
-  CONSTRAINT `SYS_PK_MV_CREATION_METADATA` PRIMARY KEY (`MV_CREATION_METADATA_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"MV_CREATION_METADATA_ID\",
-  \"DB_NAME\",
-  \"TBL_NAME\",
-  \"TXN_LIST\"
-FROM \"MV_CREATION_METADATA\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@MV_CREATION_METADATA
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `MV_CREATION_METADATA` (
-  `MV_CREATION_METADATA_ID` bigint,
-  `DB_NAME` string,
-  `TBL_NAME` string,
-  `TXN_LIST` string,
-  CONSTRAINT `SYS_PK_MV_CREATION_METADATA` PRIMARY KEY (`MV_CREATION_METADATA_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"MV_CREATION_METADATA_ID\",
-  \"DB_NAME\",
-  \"TBL_NAME\",
-  \"TXN_LIST\"
-FROM \"MV_CREATION_METADATA\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@MV_CREATION_METADATA
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `MV_TABLES_USED` (
-  `MV_CREATION_METADATA_ID` bigint,
-  `TBL_ID` bigint
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"MV_CREATION_METADATA_ID\",
-  \"TBL_ID\"
-FROM \"MV_TABLES_USED\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@MV_TABLES_USED
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `MV_TABLES_USED` (
-  `MV_CREATION_METADATA_ID` bigint,
-  `TBL_ID` bigint
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"MV_CREATION_METADATA_ID\",
-  \"TBL_ID\"
-FROM \"MV_TABLES_USED\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@MV_TABLES_USED
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
-  `TBL_COLUMN_GRANT_ID` bigint,
-  `COLUMN_NAME` string,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `TBL_COL_PRIV` string,
-  `TBL_ID` bigint,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_TBL_COL_PRIVS` PRIMARY KEY (`TBL_COLUMN_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_COLUMN_GRANT_ID\",
-  \"COLUMN_NAME\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"TBL_COL_PRIV\",
-  \"TBL_ID\",
-  \"AUTHORIZER\"
-FROM
-  \"TBL_COL_PRIVS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@TBL_COL_PRIVS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
-  `TBL_COLUMN_GRANT_ID` bigint,
-  `COLUMN_NAME` string,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `TBL_COL_PRIV` string,
-  `TBL_ID` bigint,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_TBL_COL_PRIVS` PRIMARY KEY (`TBL_COLUMN_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_COLUMN_GRANT_ID\",
-  \"COLUMN_NAME\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"TBL_COL_PRIV\",
-  \"TBL_ID\",
-  \"AUTHORIZER\"
-FROM
-  \"TBL_COL_PRIVS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@TBL_COL_PRIVS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TBL_PRIVS` (
-  `TBL_GRANT_ID` bigint,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `TBL_PRIV` string,
-  `TBL_ID` bigint,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_TBL_PRIVS` PRIMARY KEY (`TBL_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_GRANT_ID\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"TBL_PRIV\",
-  \"TBL_ID\",
-  \"AUTHORIZER\"
-FROM
-  \"TBL_PRIVS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@TBL_PRIVS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TBL_PRIVS` (
-  `TBL_GRANT_ID` bigint,
-  `CREATE_TIME` int,
-  `GRANT_OPTION` int,
-  `GRANTOR` string,
-  `GRANTOR_TYPE` string,
-  `PRINCIPAL_NAME` string,
-  `PRINCIPAL_TYPE` string,
-  `TBL_PRIV` string,
-  `TBL_ID` bigint,
-  `AUTHORIZER` string,
-  CONSTRAINT `SYS_PK_TBL_PRIVS` PRIMARY KEY (`TBL_GRANT_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"TBL_GRANT_ID\",
-  \"CREATE_TIME\",
-  \"GRANT_OPTION\",
-  \"GRANTOR\",
-  \"GRANTOR_TYPE\",
-  \"PRINCIPAL_NAME\",
-  \"PRINCIPAL_TYPE\",
-  \"TBL_PRIV\",
-  \"TBL_ID\",
-  \"AUTHORIZER\"
-FROM
-  \"TBL_PRIVS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@TBL_PRIVS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TAB_COL_STATS` (
- `CS_ID` bigint,
- `DB_NAME` string,
- `TABLE_NAME` string,
- `COLUMN_NAME` string,
- `COLUMN_TYPE` string,
- `TBL_ID` bigint,
- `LONG_LOW_VALUE` bigint,
- `LONG_HIGH_VALUE` bigint,
- `DOUBLE_HIGH_VALUE` double,
- `DOUBLE_LOW_VALUE` double,
- `BIG_DECIMAL_LOW_VALUE` string,
- `BIG_DECIMAL_HIGH_VALUE` string,
- `NUM_NULLS` bigint,
- `NUM_DISTINCTS` bigint,
- `AVG_COL_LEN` double,
- `MAX_COL_LEN` bigint,
- `NUM_TRUES` bigint,
- `NUM_FALSES` bigint,
- `LAST_ANALYZED` bigint,
-  CONSTRAINT `SYS_PK_TAB_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
- \"CS_ID\",
- \"DB_NAME\",
- \"TABLE_NAME\",
- \"COLUMN_NAME\",
- \"COLUMN_TYPE\",
- \"TBL_ID\",
- \"LONG_LOW_VALUE\",
- \"LONG_HIGH_VALUE\",
- \"DOUBLE_HIGH_VALUE\",
- \"DOUBLE_LOW_VALUE\",
- \"BIG_DECIMAL_LOW_VALUE\",
- \"BIG_DECIMAL_HIGH_VALUE\",
- \"NUM_NULLS\",
- \"NUM_DISTINCTS\",
- \"AVG_COL_LEN\",
- \"MAX_COL_LEN\",
- \"NUM_TRUES\",
- \"NUM_FALSES\",
- \"LAST_ANALYZED\"
-FROM
-  \"TAB_COL_STATS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@TAB_COL_STATS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `TAB_COL_STATS` (
- `CS_ID` bigint,
- `DB_NAME` string,
- `TABLE_NAME` string,
- `COLUMN_NAME` string,
- `COLUMN_TYPE` string,
- `TBL_ID` bigint,
- `LONG_LOW_VALUE` bigint,
- `LONG_HIGH_VALUE` bigint,
- `DOUBLE_HIGH_VALUE` double,
- `DOUBLE_LOW_VALUE` double,
- `BIG_DECIMAL_LOW_VALUE` string,
- `BIG_DECIMAL_HIGH_VALUE` string,
- `NUM_NULLS` bigint,
- `NUM_DISTINCTS` bigint,
- `AVG_COL_LEN` double,
- `MAX_COL_LEN` bigint,
- `NUM_TRUES` bigint,
- `NUM_FALSES` bigint,
- `LAST_ANALYZED` bigint,
-  CONSTRAINT `SYS_PK_TAB_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
- \"CS_ID\",
- \"DB_NAME\",
- \"TABLE_NAME\",
- \"COLUMN_NAME\",
- \"COLUMN_TYPE\",
- \"TBL_ID\",
- \"LONG_LOW_VALUE\",
- \"LONG_HIGH_VALUE\",
- \"DOUBLE_HIGH_VALUE\",
- \"DOUBLE_LOW_VALUE\",
- \"BIG_DECIMAL_LOW_VALUE\",
- \"BIG_DECIMAL_HIGH_VALUE\",
- \"NUM_NULLS\",
- \"NUM_DISTINCTS\",
- \"AVG_COL_LEN\",
- \"MAX_COL_LEN\",
- \"NUM_TRUES\",
- \"NUM_FALSES\",
- \"LAST_ANALYZED\"
-FROM
-  \"TAB_COL_STATS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@TAB_COL_STATS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PART_COL_STATS` (
- `CS_ID` bigint,
- `DB_NAME` string,
- `TABLE_NAME` string,
- `PARTITION_NAME` string,
- `COLUMN_NAME` string,
- `COLUMN_TYPE` string,
- `PART_ID` bigint,
- `LONG_LOW_VALUE` bigint,
- `LONG_HIGH_VALUE` bigint,
- `DOUBLE_HIGH_VALUE` double,
- `DOUBLE_LOW_VALUE` double,
- `BIG_DECIMAL_LOW_VALUE` string,
- `BIG_DECIMAL_HIGH_VALUE` string,
- `NUM_NULLS` bigint,
- `NUM_DISTINCTS` bigint,
- `AVG_COL_LEN` double,
- `MAX_COL_LEN` bigint,
- `NUM_TRUES` bigint,
- `NUM_FALSES` bigint,
- `LAST_ANALYZED` bigint,
-  CONSTRAINT `SYS_PK_PART_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
- \"CS_ID\",
- \"DB_NAME\",
- \"TABLE_NAME\",
- \"PARTITION_NAME\",
- \"COLUMN_NAME\",
- \"COLUMN_TYPE\",
- \"PART_ID\",
- \"LONG_LOW_VALUE\",
- \"LONG_HIGH_VALUE\",
- \"DOUBLE_HIGH_VALUE\",
- \"DOUBLE_LOW_VALUE\",
- \"BIG_DECIMAL_LOW_VALUE\",
- \"BIG_DECIMAL_HIGH_VALUE\",
- \"NUM_NULLS\",
- \"NUM_DISTINCTS\",
- \"AVG_COL_LEN\",
- \"MAX_COL_LEN\",
- \"NUM_TRUES\",
- \"NUM_FALSES\",
- \"LAST_ANALYZED\"
-FROM
-  \"PART_COL_STATS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@PART_COL_STATS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `PART_COL_STATS` (
- `CS_ID` bigint,
- `DB_NAME` string,
- `TABLE_NAME` string,
- `PARTITION_NAME` string,
- `COLUMN_NAME` string,
- `COLUMN_TYPE` string,
- `PART_ID` bigint,
- `LONG_LOW_VALUE` bigint,
- `LONG_HIGH_VALUE` bigint,
- `DOUBLE_HIGH_VALUE` double,
- `DOUBLE_LOW_VALUE` double,
- `BIG_DECIMAL_LOW_VALUE` string,
- `BIG_DECIMAL_HIGH_VALUE` string,
- `NUM_NULLS` bigint,
- `NUM_DISTINCTS` bigint,
- `AVG_COL_LEN` double,
- `MAX_COL_LEN` bigint,
- `NUM_TRUES` bigint,
- `NUM_FALSES` bigint,
- `LAST_ANALYZED` bigint,
-  CONSTRAINT `SYS_PK_PART_COL_STATS` PRIMARY KEY (`CS_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
- \"CS_ID\",
- \"DB_NAME\",
- \"TABLE_NAME\",
- \"PARTITION_NAME\",
- \"COLUMN_NAME\",
- \"COLUMN_TYPE\",
- \"PART_ID\",
- \"LONG_LOW_VALUE\",
- \"LONG_HIGH_VALUE\",
- \"DOUBLE_HIGH_VALUE\",
- \"DOUBLE_LOW_VALUE\",
- \"BIG_DECIMAL_LOW_VALUE\",
- \"BIG_DECIMAL_HIGH_VALUE\",
- \"NUM_NULLS\",
- \"NUM_DISTINCTS\",
- \"AVG_COL_LEN\",
- \"MAX_COL_LEN\",
- \"NUM_TRUES\",
- \"NUM_FALSES\",
- \"LAST_ANALYZED\"
-FROM
-  \"PART_COL_STATS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@PART_COL_STATS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE OR REPLACE VIEW `VERSION` AS SELECT 1 AS `VER_ID`, '4.0.0' AS `SCHEMA_VERSION`,
-  'Hive release version 4.0.0' AS `VERSION_COMMENT`
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: SYS@VERSION
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE OR REPLACE VIEW `VERSION` AS SELECT 1 AS `VER_ID`, '4.0.0' AS `SCHEMA_VERSION`,
-  'Hive release version 4.0.0' AS `VERSION_COMMENT`
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: SYS@VERSION
-POSTHOOK: Output: database:sys
-POSTHOOK: Lineage: VERSION.schema_version SIMPLE []
-POSTHOOK: Lineage: VERSION.ver_id SIMPLE []
-POSTHOOK: Lineage: VERSION.version_comment SIMPLE []
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `DB_VERSION` (
-  `VER_ID` BIGINT,
-  `SCHEMA_VERSION` string,
-  `VERSION_COMMENT` string,
-  CONSTRAINT `SYS_PK_DB_VERSION` PRIMARY KEY (`VER_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"VER_ID\",
-  \"SCHEMA_VERSION\",
-  \"VERSION_COMMENT\"
-FROM
-  \"VERSION\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@DB_VERSION
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `DB_VERSION` (
-  `VER_ID` BIGINT,
-  `SCHEMA_VERSION` string,
-  `VERSION_COMMENT` string,
-  CONSTRAINT `SYS_PK_DB_VERSION` PRIMARY KEY (`VER_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"VER_ID\",
-  \"SCHEMA_VERSION\",
-  \"VERSION_COMMENT\"
-FROM
-  \"VERSION\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@DB_VERSION
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `FUNCS` (
-  `FUNC_ID` bigint,
-  `CLASS_NAME` string,
-  `CREATE_TIME` int,
-  `DB_ID` bigint,
-  `FUNC_NAME` string,
-  `FUNC_TYPE` int,
-  `OWNER_NAME` string,
-  `OWNER_TYPE` string,
-  CONSTRAINT `SYS_PK_FUNCS` PRIMARY KEY (`FUNC_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"FUNC_ID\",
-  \"CLASS_NAME\",
-  \"CREATE_TIME\",
-  \"DB_ID\",
-  \"FUNC_NAME\",
-  \"FUNC_TYPE\",
-  \"OWNER_NAME\",
-  \"OWNER_TYPE\"
-FROM
-  \"FUNCS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@FUNCS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `FUNCS` (
-  `FUNC_ID` bigint,
-  `CLASS_NAME` string,
-  `CREATE_TIME` int,
-  `DB_ID` bigint,
-  `FUNC_NAME` string,
-  `FUNC_TYPE` int,
-  `OWNER_NAME` string,
-  `OWNER_TYPE` string,
-  CONSTRAINT `SYS_PK_FUNCS` PRIMARY KEY (`FUNC_ID`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"FUNC_ID\",
-  \"CLASS_NAME\",
-  \"CREATE_TIME\",
-  \"DB_ID\",
-  \"FUNC_NAME\",
-  \"FUNC_TYPE\",
-  \"OWNER_NAME\",
-  \"OWNER_TYPE\"
-FROM
-  \"FUNCS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@FUNCS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `KEY_CONSTRAINTS`
-(
-  `CHILD_CD_ID` bigint,
-  `CHILD_INTEGER_IDX` int,
-  `CHILD_TBL_ID` bigint,
-  `PARENT_CD_ID` bigint,
-  `PARENT_INTEGER_IDX` int,
-  `PARENT_TBL_ID` bigint,
-  `POSITION` bigint,
-  `CONSTRAINT_NAME` string,
-  `CONSTRAINT_TYPE` string,
-  `UPDATE_RULE` string,
-  `DELETE_RULE` string,
-  `ENABLE_VALIDATE_RELY` int,
-  `DEFAULT_VALUE` string,
-  CONSTRAINT `SYS_PK_KEY_CONSTRAINTS` PRIMARY KEY (`CONSTRAINT_NAME`, `POSITION`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"CHILD_CD_ID\",
-  \"CHILD_INTEGER_IDX\",
-  \"CHILD_TBL_ID\",
-  \"PARENT_CD_ID\",
-  \"PARENT_INTEGER_IDX\",
-  \"PARENT_TBL_ID\",
-  \"POSITION\",
-  \"CONSTRAINT_NAME\",
-  \"CONSTRAINT_TYPE\",
-  \"UPDATE_RULE\",
-  \"DELETE_RULE\",
-  \"ENABLE_VALIDATE_RELY\",
-  \"DEFAULT_VALUE\"
-FROM
-  \"KEY_CONSTRAINTS\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@KEY_CONSTRAINTS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `KEY_CONSTRAINTS`
-(
-  `CHILD_CD_ID` bigint,
-  `CHILD_INTEGER_IDX` int,
-  `CHILD_TBL_ID` bigint,
-  `PARENT_CD_ID` bigint,
-  `PARENT_INTEGER_IDX` int,
-  `PARENT_TBL_ID` bigint,
-  `POSITION` bigint,
-  `CONSTRAINT_NAME` string,
-  `CONSTRAINT_TYPE` string,
-  `UPDATE_RULE` string,
-  `DELETE_RULE` string,
-  `ENABLE_VALIDATE_RELY` int,
-  `DEFAULT_VALUE` string,
-  CONSTRAINT `SYS_PK_KEY_CONSTRAINTS` PRIMARY KEY (`CONSTRAINT_NAME`, `POSITION`) DISABLE
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"CHILD_CD_ID\",
-  \"CHILD_INTEGER_IDX\",
-  \"CHILD_TBL_ID\",
-  \"PARENT_CD_ID\",
-  \"PARENT_INTEGER_IDX\",
-  \"PARENT_TBL_ID\",
-  \"POSITION\",
-  \"CONSTRAINT_NAME\",
-  \"CONSTRAINT_TYPE\",
-  \"UPDATE_RULE\",
-  \"DELETE_RULE\",
-  \"ENABLE_VALIDATE_RELY\",
-  \"DEFAULT_VALUE\"
-FROM
-  \"KEY_CONSTRAINTS\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@KEY_CONSTRAINTS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE OR REPLACE VIEW `TABLE_STATS_VIEW` AS
-SELECT
-  `TBL_ID`,
-  max(CASE `PARAM_KEY` WHEN 'COLUMN_STATS_ACCURATE' THEN `PARAM_VALUE` END) AS COLUMN_STATS_ACCURATE,
-  max(CASE `PARAM_KEY` WHEN 'numFiles' THEN `PARAM_VALUE` END) AS NUM_FILES,
-  max(CASE `PARAM_KEY` WHEN 'numRows' THEN `PARAM_VALUE` END) AS NUM_ROWS,
-  max(CASE `PARAM_KEY` WHEN 'rawDataSize' THEN `PARAM_VALUE` END) AS RAW_DATA_SIZE,
-  max(CASE `PARAM_KEY` WHEN 'totalSize' THEN `PARAM_VALUE` END) AS TOTAL_SIZE,
-#### A masked pattern was here ####
-FROM `TABLE_PARAMS` GROUP BY `TBL_ID`
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@table_params
-PREHOOK: Output: SYS@TABLE_STATS_VIEW
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE OR REPLACE VIEW `TABLE_STATS_VIEW` AS
-SELECT
-  `TBL_ID`,
-  max(CASE `PARAM_KEY` WHEN 'COLUMN_STATS_ACCURATE' THEN `PARAM_VALUE` END) AS COLUMN_STATS_ACCURATE,
-  max(CASE `PARAM_KEY` WHEN 'numFiles' THEN `PARAM_VALUE` END) AS NUM_FILES,
-  max(CASE `PARAM_KEY` WHEN 'numRows' THEN `PARAM_VALUE` END) AS NUM_ROWS,
-  max(CASE `PARAM_KEY` WHEN 'rawDataSize' THEN `PARAM_VALUE` END) AS RAW_DATA_SIZE,
-  max(CASE `PARAM_KEY` WHEN 'totalSize' THEN `PARAM_VALUE` END) AS TOTAL_SIZE,
-#### A masked pattern was here ####
-FROM `TABLE_PARAMS` GROUP BY `TBL_ID`
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@table_params
-POSTHOOK: Output: SYS@TABLE_STATS_VIEW
-POSTHOOK: Output: database:sys
-POSTHOOK: Lineage: TABLE_STATS_VIEW.column_stats_accurate EXPRESSION [(table_params)table_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (table_params)table_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_STATS_VIEW.num_files EXPRESSION [(table_params)table_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (table_params)table_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_STATS_VIEW.num_rows EXPRESSION [(table_params)table_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (table_params)table_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_STATS_VIEW.raw_data_size EXPRESSION [(table_params)table_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (table_params)table_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_STATS_VIEW.tbl_id SIMPLE [(table_params)table_params.FieldSchema(name:tbl_id, type:bigint, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_STATS_VIEW.total_size EXPRESSION [(table_params)table_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (table_params)table_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_STATS_VIEW.transient_last_ddl_time EXPRESSION [(table_params)table_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (table_params)table_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-PREHOOK: query: CREATE OR REPLACE VIEW `PARTITION_STATS_VIEW` AS
-SELECT
-  `PART_ID`,
-  max(CASE `PARAM_KEY` WHEN 'COLUMN_STATS_ACCURATE' THEN `PARAM_VALUE` END) AS COLUMN_STATS_ACCURATE,
-  max(CASE `PARAM_KEY` WHEN 'numFiles' THEN `PARAM_VALUE` END) AS NUM_FILES,
-  max(CASE `PARAM_KEY` WHEN 'numRows' THEN `PARAM_VALUE` END) AS NUM_ROWS,
-  max(CASE `PARAM_KEY` WHEN 'rawDataSize' THEN `PARAM_VALUE` END) AS RAW_DATA_SIZE,
-  max(CASE `PARAM_KEY` WHEN 'totalSize' THEN `PARAM_VALUE` END) AS TOTAL_SIZE,
-#### A masked pattern was here ####
-FROM `PARTITION_PARAMS` GROUP BY `PART_ID`
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@partition_params
-PREHOOK: Output: SYS@PARTITION_STATS_VIEW
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE OR REPLACE VIEW `PARTITION_STATS_VIEW` AS
-SELECT
-  `PART_ID`,
-  max(CASE `PARAM_KEY` WHEN 'COLUMN_STATS_ACCURATE' THEN `PARAM_VALUE` END) AS COLUMN_STATS_ACCURATE,
-  max(CASE `PARAM_KEY` WHEN 'numFiles' THEN `PARAM_VALUE` END) AS NUM_FILES,
-  max(CASE `PARAM_KEY` WHEN 'numRows' THEN `PARAM_VALUE` END) AS NUM_ROWS,
-  max(CASE `PARAM_KEY` WHEN 'rawDataSize' THEN `PARAM_VALUE` END) AS RAW_DATA_SIZE,
-  max(CASE `PARAM_KEY` WHEN 'totalSize' THEN `PARAM_VALUE` END) AS TOTAL_SIZE,
-#### A masked pattern was here ####
-FROM `PARTITION_PARAMS` GROUP BY `PART_ID`
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@partition_params
-POSTHOOK: Output: SYS@PARTITION_STATS_VIEW
-POSTHOOK: Output: database:sys
-POSTHOOK: Lineage: PARTITION_STATS_VIEW.column_stats_accurate EXPRESSION [(partition_params)partition_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (partition_params)partition_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: PARTITION_STATS_VIEW.num_files EXPRESSION [(partition_params)partition_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (partition_params)partition_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: PARTITION_STATS_VIEW.num_rows EXPRESSION [(partition_params)partition_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (partition_params)partition_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: PARTITION_STATS_VIEW.part_id SIMPLE [(partition_params)partition_params.FieldSchema(name:part_id, type:bigint, comment:from deserializer), ]
-POSTHOOK: Lineage: PARTITION_STATS_VIEW.raw_data_size EXPRESSION [(partition_params)partition_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (partition_params)partition_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: PARTITION_STATS_VIEW.total_size EXPRESSION [(partition_params)partition_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (partition_params)partition_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: PARTITION_STATS_VIEW.transient_last_ddl_time EXPRESSION [(partition_params)partition_params.FieldSchema(name:param_key, type:string, comment:from deserializer), (partition_params)partition_params.FieldSchema(name:param_value, type:string, comment:from deserializer), ]
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_RESOURCEPLANS` (
-  `NAME` string,
-  `NS` string,
-  `STATUS` string,
-  `QUERY_PARALLELISM` int,
-  `DEFAULT_POOL_PATH` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\",
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  \"STATUS\",
-  \"WM_RESOURCEPLAN\".\"QUERY_PARALLELISM\",
-  \"WM_POOL\".\"PATH\"
-FROM
-  \"WM_RESOURCEPLAN\" LEFT OUTER JOIN \"WM_POOL\" ON \"WM_RESOURCEPLAN\".\"DEFAULT_POOL_ID\" = \"WM_POOL\".\"POOL_ID\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@WM_RESOURCEPLANS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_RESOURCEPLANS` (
-  `NAME` string,
-  `NS` string,
-  `STATUS` string,
-  `QUERY_PARALLELISM` int,
-  `DEFAULT_POOL_PATH` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\",
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  \"STATUS\",
-  \"WM_RESOURCEPLAN\".\"QUERY_PARALLELISM\",
-  \"WM_POOL\".\"PATH\"
-FROM
-  \"WM_RESOURCEPLAN\" LEFT OUTER JOIN \"WM_POOL\" ON \"WM_RESOURCEPLAN\".\"DEFAULT_POOL_ID\" = \"WM_POOL\".\"POOL_ID\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@WM_RESOURCEPLANS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_TRIGGERS` (
-  `RP_NAME` string,
-  `NS` string,
-  `NAME` string,
-  `TRIGGER_EXPRESSION` string,
-  `ACTION_EXPRESSION` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  r.\"NAME\" AS RP_NAME,
-  case when r.\"NS\" is null then 'default' else r.\"NS\" end,
-  t.\"NAME\" AS NAME,
-  \"TRIGGER_EXPRESSION\",
-  \"ACTION_EXPRESSION\"
-FROM
-  \"WM_TRIGGER\" t
-JOIN
-  \"WM_RESOURCEPLAN\" r
-ON
-  t.\"RP_ID\" = r.\"RP_ID\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@WM_TRIGGERS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_TRIGGERS` (
-  `RP_NAME` string,
-  `NS` string,
-  `NAME` string,
-  `TRIGGER_EXPRESSION` string,
-  `ACTION_EXPRESSION` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  r.\"NAME\" AS RP_NAME,
-  case when r.\"NS\" is null then 'default' else r.\"NS\" end,
-  t.\"NAME\" AS NAME,
-  \"TRIGGER_EXPRESSION\",
-  \"ACTION_EXPRESSION\"
-FROM
-  \"WM_TRIGGER\" t
-JOIN
-  \"WM_RESOURCEPLAN\" r
-ON
-  t.\"RP_ID\" = r.\"RP_ID\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@WM_TRIGGERS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_POOLS` (
-  `RP_NAME` string,
-  `NS` string,
-  `PATH` string,
-  `ALLOC_FRACTION` double,
-  `QUERY_PARALLELISM` int,
-  `SCHEDULING_POLICY` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\",
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  \"WM_POOL\".\"PATH\",
-  \"WM_POOL\".\"ALLOC_FRACTION\",
-  \"WM_POOL\".\"QUERY_PARALLELISM\",
-  \"WM_POOL\".\"SCHEDULING_POLICY\"
-FROM
-  \"WM_POOL\"
-JOIN
-  \"WM_RESOURCEPLAN\"
-ON
-  \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\""
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@WM_POOLS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_POOLS` (
-  `RP_NAME` string,
-  `NS` string,
-  `PATH` string,
-  `ALLOC_FRACTION` double,
-  `QUERY_PARALLELISM` int,
-  `SCHEDULING_POLICY` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\",
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  \"WM_POOL\".\"PATH\",
-  \"WM_POOL\".\"ALLOC_FRACTION\",
-  \"WM_POOL\".\"QUERY_PARALLELISM\",
-  \"WM_POOL\".\"SCHEDULING_POLICY\"
-FROM
-  \"WM_POOL\"
-JOIN
-  \"WM_RESOURCEPLAN\"
-ON
-  \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\""
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@WM_POOLS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` (
-  `RP_NAME` string,
-  `NS` string,
-  `POOL_PATH` string,
-  `TRIGGER_NAME` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  \"WM_POOL\".\"PATH\" AS POOL_PATH,
-  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
-FROM \"WM_POOL_TO_TRIGGER\"
-  JOIN \"WM_POOL\" ON \"WM_POOL_TO_TRIGGER\".\"POOL_ID\" = \"WM_POOL\".\"POOL_ID\"
-  JOIN \"WM_TRIGGER\" ON \"WM_POOL_TO_TRIGGER\".\"TRIGGER_ID\" = \"WM_TRIGGER\".\"TRIGGER_ID\"
-  JOIN \"WM_RESOURCEPLAN\" ON \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
-UNION
-SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  '<unmanaged queries>' AS POOL_PATH,
-  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
-FROM \"WM_TRIGGER\"
-  JOIN \"WM_RESOURCEPLAN\" ON \"WM_TRIGGER\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
-WHERE CAST(\"WM_TRIGGER\".\"IS_IN_UNMANAGED\" AS CHAR) IN ('1', 't')
-"
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@WM_POOLS_TO_TRIGGERS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` (
-  `RP_NAME` string,
-  `NS` string,
-  `POOL_PATH` string,
-  `TRIGGER_NAME` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  \"WM_POOL\".\"PATH\" AS POOL_PATH,
-  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
-FROM \"WM_POOL_TO_TRIGGER\"
-  JOIN \"WM_POOL\" ON \"WM_POOL_TO_TRIGGER\".\"POOL_ID\" = \"WM_POOL\".\"POOL_ID\"
-  JOIN \"WM_TRIGGER\" ON \"WM_POOL_TO_TRIGGER\".\"TRIGGER_ID\" = \"WM_TRIGGER\".\"TRIGGER_ID\"
-  JOIN \"WM_RESOURCEPLAN\" ON \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
-UNION
-SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  '<unmanaged queries>' AS POOL_PATH,
-  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
-FROM \"WM_TRIGGER\"
-  JOIN \"WM_RESOURCEPLAN\" ON \"WM_TRIGGER\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
-WHERE CAST(\"WM_TRIGGER\".\"IS_IN_UNMANAGED\" AS CHAR) IN ('1', 't')
-"
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@WM_POOLS_TO_TRIGGERS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_MAPPINGS` (
-  `RP_NAME` string,
-  `NS` string,
-  `ENTITY_TYPE` string,
-  `ENTITY_NAME` string,
-  `POOL_PATH` string,
-  `ORDERING` int
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\",
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  \"ENTITY_TYPE\",
-  \"ENTITY_NAME\",
-  case when \"WM_POOL\".\"PATH\" is null then '<unmanaged>' else \"WM_POOL\".\"PATH\" end,
-  \"ORDERING\"
-FROM \"WM_MAPPING\"
-JOIN \"WM_RESOURCEPLAN\" ON \"WM_MAPPING\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
-LEFT OUTER JOIN \"WM_POOL\" ON \"WM_POOL\".\"POOL_ID\" = \"WM_MAPPING\".\"POOL_ID\"
-"
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@WM_MAPPINGS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `WM_MAPPINGS` (
-  `RP_NAME` string,
-  `NS` string,
-  `ENTITY_TYPE` string,
-  `ENTITY_NAME` string,
-  `POOL_PATH` string,
-  `ORDERING` int
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"WM_RESOURCEPLAN\".\"NAME\",
-  case when \"WM_RESOURCEPLAN\".\"NS\" is null then 'default' else \"WM_RESOURCEPLAN\".\"NS\" end AS NS,
-  \"ENTITY_TYPE\",
-  \"ENTITY_NAME\",
-  case when \"WM_POOL\".\"PATH\" is null then '<unmanaged>' else \"WM_POOL\".\"PATH\" end,
-  \"ORDERING\"
-FROM \"WM_MAPPING\"
-JOIN \"WM_RESOURCEPLAN\" ON \"WM_MAPPING\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
-LEFT OUTER JOIN \"WM_POOL\" ON \"WM_POOL\".\"POOL_ID\" = \"WM_MAPPING\".\"POOL_ID\"
-"
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@WM_MAPPINGS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `COMPACTION_QUEUE` (
-  `CQ_ID` bigint,
-  `CQ_DATABASE` string,
-  `CQ_TABLE` string,
-  `CQ_PARTITION` string,
-  `CQ_STATE` string,
-  `CQ_TYPE` string,
-  `CQ_TBLPROPERTIES` string,
-  `CQ_WORKER_ID` string,
-  `CQ_START` bigint,
-  `CQ_RUN_AS` string,
-  `CQ_HIGHEST_WRITE_ID` bigint,
-  `CQ_HADOOP_JOB_ID` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"COMPACTION_QUEUE\".\"CQ_ID\",
-  \"COMPACTION_QUEUE\".\"CQ_DATABASE\",
-  \"COMPACTION_QUEUE\".\"CQ_TABLE\",
-  \"COMPACTION_QUEUE\".\"CQ_PARTITION\",
-  \"COMPACTION_QUEUE\".\"CQ_STATE\",
-  \"COMPACTION_QUEUE\".\"CQ_TYPE\",
-  \"COMPACTION_QUEUE\".\"CQ_TBLPROPERTIES\",
-  \"COMPACTION_QUEUE\".\"CQ_WORKER_ID\",
-  \"COMPACTION_QUEUE\".\"CQ_START\",
-  \"COMPACTION_QUEUE\".\"CQ_RUN_AS\",
-  \"COMPACTION_QUEUE\".\"CQ_HIGHEST_WRITE_ID\",
-  \"COMPACTION_QUEUE\".\"CQ_HADOOP_JOB_ID\"
-FROM \"COMPACTION_QUEUE\"
-"
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@COMPACTION_QUEUE
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `COMPACTION_QUEUE` (
-  `CQ_ID` bigint,
-  `CQ_DATABASE` string,
-  `CQ_TABLE` string,
-  `CQ_PARTITION` string,
-  `CQ_STATE` string,
-  `CQ_TYPE` string,
-  `CQ_TBLPROPERTIES` string,
-  `CQ_WORKER_ID` string,
-  `CQ_START` bigint,
-  `CQ_RUN_AS` string,
-  `CQ_HIGHEST_WRITE_ID` bigint,
-  `CQ_HADOOP_JOB_ID` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"COMPACTION_QUEUE\".\"CQ_ID\",
-  \"COMPACTION_QUEUE\".\"CQ_DATABASE\",
-  \"COMPACTION_QUEUE\".\"CQ_TABLE\",
-  \"COMPACTION_QUEUE\".\"CQ_PARTITION\",
-  \"COMPACTION_QUEUE\".\"CQ_STATE\",
-  \"COMPACTION_QUEUE\".\"CQ_TYPE\",
-  \"COMPACTION_QUEUE\".\"CQ_TBLPROPERTIES\",
-  \"COMPACTION_QUEUE\".\"CQ_WORKER_ID\",
-  \"COMPACTION_QUEUE\".\"CQ_START\",
-  \"COMPACTION_QUEUE\".\"CQ_RUN_AS\",
-  \"COMPACTION_QUEUE\".\"CQ_HIGHEST_WRITE_ID\",
-  \"COMPACTION_QUEUE\".\"CQ_HADOOP_JOB_ID\"
-FROM \"COMPACTION_QUEUE\"
-"
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@COMPACTION_QUEUE
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `COMPLETED_COMPACTIONS` (
-  `CC_ID` bigint,
-  `CC_DATABASE` string,
-  `CC_TABLE` string,
-  `CC_PARTITION` string,
-  `CC_STATE` string,
-  `CC_TYPE` string,
-  `CC_TBLPROPERTIES` string,
-  `CC_WORKER_ID` string,
-  `CC_START` bigint,
-  `CC_END` bigint,
-  `CC_RUN_AS` string,
-  `CC_HIGHEST_WRITE_ID` bigint,
-  `CC_HADOOP_JOB_ID` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"COMPLETED_COMPACTIONS\".\"CC_ID\",
-  \"COMPLETED_COMPACTIONS\".\"CC_DATABASE\",
-  \"COMPLETED_COMPACTIONS\".\"CC_TABLE\",
-  \"COMPLETED_COMPACTIONS\".\"CC_PARTITION\",
-  \"COMPLETED_COMPACTIONS\".\"CC_STATE\",
-  \"COMPLETED_COMPACTIONS\".\"CC_TYPE\",
-  \"COMPLETED_COMPACTIONS\".\"CC_TBLPROPERTIES\",
-  \"COMPLETED_COMPACTIONS\".\"CC_WORKER_ID\",
-  \"COMPLETED_COMPACTIONS\".\"CC_START\",
-  \"COMPLETED_COMPACTIONS\".\"CC_END\",
-  \"COMPLETED_COMPACTIONS\".\"CC_RUN_AS\",
-  \"COMPLETED_COMPACTIONS\".\"CC_HIGHEST_WRITE_ID\",
-  \"COMPLETED_COMPACTIONS\".\"CC_HADOOP_JOB_ID\"
-FROM \"COMPLETED_COMPACTIONS\"
-"
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: SYS@COMPLETED_COMPACTIONS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE EXTERNAL TABLE IF NOT EXISTS `COMPLETED_COMPACTIONS` (
-  `CC_ID` bigint,
-  `CC_DATABASE` string,
-  `CC_TABLE` string,
-  `CC_PARTITION` string,
-  `CC_STATE` string,
-  `CC_TYPE` string,
-  `CC_TBLPROPERTIES` string,
-  `CC_WORKER_ID` string,
-  `CC_START` bigint,
-  `CC_END` bigint,
-  `CC_RUN_AS` string,
-  `CC_HIGHEST_WRITE_ID` bigint,
-  `CC_HADOOP_JOB_ID` string
-)
-STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
-TBLPROPERTIES (
-"hive.sql.database.type" = "METASTORE",
-"hive.sql.query" =
-"SELECT
-  \"COMPLETED_COMPACTIONS\".\"CC_ID\",
-  \"COMPLETED_COMPACTIONS\".\"CC_DATABASE\",
-  \"COMPLETED_COMPACTIONS\".\"CC_TABLE\",
-  \"COMPLETED_COMPACTIONS\".\"CC_PARTITION\",
-  \"COMPLETED_COMPACTIONS\".\"CC_STATE\",
-  \"COMPLETED_COMPACTIONS\".\"CC_TYPE\",
-  \"COMPLETED_COMPACTIONS\".\"CC_TBLPROPERTIES\",
-  \"COMPLETED_COMPACTIONS\".\"CC_WORKER_ID\",
-  \"COMPLETED_COMPACTIONS\".\"CC_START\",
-  \"COMPLETED_COMPACTIONS\".\"CC_END\",
-  \"COMPLETED_COMPACTIONS\".\"CC_RUN_AS\",
-  \"COMPLETED_COMPACTIONS\".\"CC_HIGHEST_WRITE_ID\",
-  \"COMPLETED_COMPACTIONS\".\"CC_HADOOP_JOB_ID\"
-FROM \"COMPLETED_COMPACTIONS\"
-"
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: SYS@COMPLETED_COMPACTIONS
-POSTHOOK: Output: database:sys
-PREHOOK: query: CREATE OR REPLACE VIEW `COMPACTIONS`
-(
-  `C_ID`,
-  `C_CATALOG`,
-  `C_DATABASE`,
-  `C_TABLE`,
-  `C_PARTITION`,
-  `C_TYPE`,
-  `C_STATE`,
-  `C_HOSTNAME`,
-  `C_WORKER_ID`,
-  `C_START`,
-  `C_DURATION`,
-  `C_HADOOP_JOB_ID`,
-  `C_RUN_AS`,
-  `C_HIGHEST_WRITE_ID`
-) AS
-SELECT
-  CC_ID,
-  'default',
-  CC_DATABASE,
-  CC_TABLE,
-  CC_PARTITION,
-  CASE WHEN CC_TYPE = 'i' THEN 'minor' WHEN CC_TYPE = 'a' THEN 'major' ELSE 'UNKNOWN' END,
-  CASE WHEN CC_STATE = 'f' THEN 'failed' WHEN CC_STATE = 's' THEN 'succeeded' WHEN CC_STATE = 'a' THEN 'attempted' ELSE 'UNKNOWN' END,
-  CASE WHEN CC_WORKER_ID IS NULL THEN cast (null as string) ELSE split(CC_WORKER_ID,"-")[0] END,
-  CASE WHEN CC_WORKER_ID IS NULL THEN cast (null as string) ELSE split(CC_WORKER_ID,"-")[1] END,
-  CC_START,
-  CASE WHEN CC_END IS NULL THEN cast (null as string) ELSE CC_END-CC_START END,
-  CC_HADOOP_JOB_ID,
-  CC_RUN_AS,
-  CC_HIGHEST_WRITE_ID
-FROM COMPLETED_COMPACTIONS
-UNION ALL
-SELECT
-  CQ_ID,
-  'default',
-  CQ_DATABASE,
-  CQ_TABLE,
-  CQ_PARTITION,
-  CASE WHEN CQ_TYPE = 'i' THEN 'minor' WHEN CQ_TYPE = 'a' THEN 'major' ELSE 'UNKNOWN' END,
-  CASE WHEN CQ_STATE = 'i' THEN 'initiated' WHEN CQ_STATE = 'w' THEN 'working' WHEN CQ_STATE = 'r' THEN 'ready for cleaning' ELSE 'UNKNOWN' END,
-  CASE WHEN CQ_WORKER_ID IS NULL THEN NULL ELSE split(CQ_WORKER_ID,"-")[0] END,
-  CASE WHEN CQ_WORKER_ID IS NULL THEN NULL ELSE split(CQ_WORKER_ID,"-")[1] END,
-  CQ_START,
-  cast (null as string),
-  CQ_HADOOP_JOB_ID,
-  CQ_RUN_AS,
-  CQ_HIGHEST_WRITE_ID
-FROM COMPACTION_QUEUE
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@compaction_queue
-PREHOOK: Input: sys@completed_compactions
-PREHOOK: Output: SYS@COMPACTIONS
-PREHOOK: Output: database:sys
-POSTHOOK: query: CREATE OR REPLACE VIEW `COMPACTIONS`
-(
-  `C_ID`,
-  `C_CATALOG`,
-  `C_DATABASE`,
-  `C_TABLE`,
-  `C_PARTITION`,
-  `C_TYPE`,
-  `C_STATE`,
-  `C_HOSTNAME`,
-  `C_WORKER_ID`,
-  `C_START`,
-  `C_DURATION`,
-  `C_HADOOP_JOB_ID`,
-  `C_RUN_AS`,
-  `C_HIGHEST_WRITE_ID`
-) AS
-SELECT
-  CC_ID,
-  'default',
-  CC_DATABASE,
-  CC_TABLE,
-  CC_PARTITION,
-  CASE WHEN CC_TYPE = 'i' THEN 'minor' WHEN CC_TYPE = 'a' THEN 'major' ELSE 'UNKNOWN' END,
-  CASE WHEN CC_STATE = 'f' THEN 'failed' WHEN CC_STATE = 's' THEN 'succeeded' WHEN CC_STATE = 'a' THEN 'attempted' ELSE 'UNKNOWN' END,
-  CASE WHEN CC_WORKER_ID IS NULL THEN cast (null as string) ELSE split(CC_WORKER_ID,"-")[0] END,
-  CASE WHEN CC_WORKER_ID IS NULL THEN cast (null as string) ELSE split(CC_WORKER_ID,"-")[1] END,
-  CC_START,
-  CASE WHEN CC_END IS NULL THEN cast (null as string) ELSE CC_END-CC_START END,
-  CC_HADOOP_JOB_ID,
-  CC_RUN_AS,
-  CC_HIGHEST_WRITE_ID
-FROM COMPLETED_COMPACTIONS
-UNION ALL
-SELECT
-  CQ_ID,
-  'default',
-  CQ_DATABASE,
-  CQ_TABLE,
-  CQ_PARTITION,
-  CASE WHEN CQ_TYPE = 'i' THEN 'minor' WHEN CQ_TYPE = 'a' THEN 'major' ELSE 'UNKNOWN' END,
-  CASE WHEN CQ_STATE = 'i' THEN 'initiated' WHEN CQ_STATE = 'w' THEN 'working' WHEN CQ_STATE = 'r' THEN 'ready for cleaning' ELSE 'UNKNOWN' END,
-  CASE WHEN CQ_WORKER_ID IS NULL THEN NULL ELSE split(CQ_WORKER_ID,"-")[0] END,
-  CASE WHEN CQ_WORKER_ID IS NULL THEN NULL ELSE split(CQ_WORKER_ID,"-")[1] END,
-  CQ_START,
-  cast (null as string),
-  CQ_HADOOP_JOB_ID,
-  CQ_RUN_AS,
-  CQ_HIGHEST_WRITE_ID
-FROM COMPACTION_QUEUE
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@compaction_queue
-POSTHOOK: Input: sys@completed_compactions
-POSTHOOK: Output: SYS@COMPACTIONS
-POSTHOOK: Output: database:sys
-POSTHOOK: Lineage: COMPACTIONS.c_catalog EXPRESSION []
-POSTHOOK: Lineage: COMPACTIONS.c_database EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_database, type:string, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_database, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_duration EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_end, type:bigint, comment:from deserializer), (completed_compactions)completed_compactions.FieldSchema(name:cc_start, type:bigint, comment:from deserializer), ]
-#### A masked pattern was here ####
-POSTHOOK: Lineage: COMPACTIONS.c_highest_write_id EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_highest_write_id, type:bigint, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_highest_write_id, type:bigint, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_hostname EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_worker_id, type:string, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_worker_id, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_id EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_id, type:bigint, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_id, type:bigint, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_partition EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_partition, type:string, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_partition, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_run_as EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_run_as, type:string, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_run_as, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_start EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_start, type:bigint, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_start, type:bigint, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_state EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_state, type:string, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_state, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_table EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_table, type:string, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_table, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_type EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_type, type:string, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_type, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COMPACTIONS.c_worker_id EXPRESSION [(completed_compactions)completed_compactions.FieldSchema(name:cc_worker_id, type:string, comment:from deserializer), (compaction_queue)compaction_queue.FieldSchema(name:cq_worker_id, type:string, comment:from deserializer), ]
-PREHOOK: query: CREATE DATABASE IF NOT EXISTS INFORMATION_SCHEMA
-PREHOOK: type: CREATEDATABASE
-PREHOOK: Output: database:INFORMATION_SCHEMA
-POSTHOOK: query: CREATE DATABASE IF NOT EXISTS INFORMATION_SCHEMA
-POSTHOOK: type: CREATEDATABASE
-POSTHOOK: Output: database:INFORMATION_SCHEMA
-PREHOOK: query: USE INFORMATION_SCHEMA
-PREHOOK: type: SWITCHDATABASE
-PREHOOK: Input: database:information_schema
-POSTHOOK: query: USE INFORMATION_SCHEMA
-POSTHOOK: type: SWITCHDATABASE
-POSTHOOK: Input: database:information_schema
-PREHOOK: query: CREATE OR REPLACE VIEW `SCHEMATA`
-(
-  `CATALOG_NAME`,
-  `SCHEMA_NAME`,
-  `SCHEMA_OWNER`,
-  `DEFAULT_CHARACTER_SET_CATALOG`,
-  `DEFAULT_CHARACTER_SET_SCHEMA`,
-  `DEFAULT_CHARACTER_SET_NAME`,
-  `SQL_PATH`
-) AS
-SELECT DISTINCT
-  'default',
-  D.`NAME`,
-  D.`OWNER_NAME`,
-  cast(null as string),
-  cast(null as string),
-  cast(null as string),
-  `DB_LOCATION_URI`
-FROM
-  `sys`.`DBS` D LEFT JOIN `sys`.`TBLS` T ON (D.`DB_ID` = T.`DB_ID`)
-                LEFT JOIN `sys`.`TBL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
-  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND current_authorizer() = P.`AUTHORIZER`
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@dbs
-PREHOOK: Input: sys@tbl_privs
-PREHOOK: Input: sys@tbls
-PREHOOK: Output: INFORMATION_SCHEMA@SCHEMATA
-PREHOOK: Output: database:information_schema
-POSTHOOK: query: CREATE OR REPLACE VIEW `SCHEMATA`
-(
-  `CATALOG_NAME`,
-  `SCHEMA_NAME`,
-  `SCHEMA_OWNER`,
-  `DEFAULT_CHARACTER_SET_CATALOG`,
-  `DEFAULT_CHARACTER_SET_SCHEMA`,
-  `DEFAULT_CHARACTER_SET_NAME`,
-  `SQL_PATH`
-) AS
-SELECT DISTINCT
-  'default',
-  D.`NAME`,
-  D.`OWNER_NAME`,
-  cast(null as string),
-  cast(null as string),
-  cast(null as string),
-  `DB_LOCATION_URI`
-FROM
-  `sys`.`DBS` D LEFT JOIN `sys`.`TBLS` T ON (D.`DB_ID` = T.`DB_ID`)
-                LEFT JOIN `sys`.`TBL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
-  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND current_authorizer() = P.`AUTHORIZER`
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@dbs
-POSTHOOK: Input: sys@tbl_privs
-POSTHOOK: Input: sys@tbls
-POSTHOOK: Output: INFORMATION_SCHEMA@SCHEMATA
-POSTHOOK: Output: database:information_schema
-POSTHOOK: Lineage: SCHEMATA.catalog_name SIMPLE []
-POSTHOOK: Lineage: SCHEMATA.default_character_set_catalog EXPRESSION []
-POSTHOOK: Lineage: SCHEMATA.default_character_set_name EXPRESSION []
-POSTHOOK: Lineage: SCHEMATA.default_character_set_schema EXPRESSION []
-POSTHOOK: Lineage: SCHEMATA.schema_name SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
-#### A masked pattern was here ####
-POSTHOOK: Lineage: SCHEMATA.sql_path SIMPLE [(dbs)d.FieldSchema(name:db_location_uri, type:string, comment:from deserializer), ]
-PREHOOK: query: CREATE OR REPLACE VIEW `TABLES`
-(
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `TABLE_TYPE`,
-  `SELF_REFERENCING_COLUMN_NAME`,
-  `REFERENCE_GENERATION`,
-  `USER_DEFINED_TYPE_CATALOG`,
-  `USER_DEFINED_TYPE_SCHEMA`,
-  `USER_DEFINED_TYPE_NAME`,
-  `IS_INSERTABLE_INTO`,
-  `IS_TYPED`,
-  `COMMIT_ACTION`
-) AS
-SELECT DISTINCT
-  'default',
-  D.NAME,
-  T.TBL_NAME,
-  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'VIEW', 'BASE_TABLE'),
-  cast(null as string),
-  cast(null as string),
-  cast(null as string),
-  cast(null as string),
-  cast(null as string),
-  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'NO', 'YES'),
-  'NO',
-  cast(null as string)
-FROM
-  `sys`.`TBLS` T JOIN `sys`.`DBS` D ON (D.`DB_ID` = T.`DB_ID`)
-                 LEFT JOIN `sys`.`TBL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
-  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer()
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@dbs
-PREHOOK: Input: sys@tbl_privs
-PREHOOK: Input: sys@tbls
-PREHOOK: Output: INFORMATION_SCHEMA@TABLES
-PREHOOK: Output: database:information_schema
-POSTHOOK: query: CREATE OR REPLACE VIEW `TABLES`
-(
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `TABLE_TYPE`,
-  `SELF_REFERENCING_COLUMN_NAME`,
-  `REFERENCE_GENERATION`,
-  `USER_DEFINED_TYPE_CATALOG`,
-  `USER_DEFINED_TYPE_SCHEMA`,
-  `USER_DEFINED_TYPE_NAME`,
-  `IS_INSERTABLE_INTO`,
-  `IS_TYPED`,
-  `COMMIT_ACTION`
-) AS
-SELECT DISTINCT
-  'default',
-  D.NAME,
-  T.TBL_NAME,
-  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'VIEW', 'BASE_TABLE'),
-  cast(null as string),
-  cast(null as string),
-  cast(null as string),
-  cast(null as string),
-  cast(null as string),
-  IF(length(T.VIEW_ORIGINAL_TEXT) > 0, 'NO', 'YES'),
-  'NO',
-  cast(null as string)
-FROM
-  `sys`.`TBLS` T JOIN `sys`.`DBS` D ON (D.`DB_ID` = T.`DB_ID`)
-                 LEFT JOIN `sys`.`TBL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
-  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer()
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@dbs
-POSTHOOK: Input: sys@tbl_privs
-POSTHOOK: Input: sys@tbls
-POSTHOOK: Output: INFORMATION_SCHEMA@TABLES
-POSTHOOK: Output: database:information_schema
-POSTHOOK: Lineage: TABLES.commit_action EXPRESSION []
-POSTHOOK: Lineage: TABLES.is_insertable_into EXPRESSION [(tbls)t.FieldSchema(name:view_original_text, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLES.is_typed SIMPLE []
-POSTHOOK: Lineage: TABLES.reference_generation EXPRESSION []
-POSTHOOK: Lineage: TABLES.self_referencing_column_name EXPRESSION []
-POSTHOOK: Lineage: TABLES.table_catalog SIMPLE []
-POSTHOOK: Lineage: TABLES.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLES.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLES.table_type EXPRESSION [(tbls)t.FieldSchema(name:view_original_text, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLES.user_defined_type_catalog EXPRESSION []
-POSTHOOK: Lineage: TABLES.user_defined_type_name EXPRESSION []
-POSTHOOK: Lineage: TABLES.user_defined_type_schema EXPRESSION []
-PREHOOK: query: CREATE OR REPLACE VIEW `TABLE_PRIVILEGES`
-(
-  `GRANTOR`,
-  `GRANTEE`,
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `PRIVILEGE_TYPE`,
-  `IS_GRANTABLE`,
-  `WITH_HIERARCHY`
-) AS
-SELECT DISTINCT
-  P.`GRANTOR`,
-  P.`PRINCIPAL_NAME`,
-  'default',
-  D.`NAME`,
-  T.`TBL_NAME`,
-  P.`TBL_PRIV`,
-  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'),
-  'NO'
-FROM
-  `sys`.`TBL_PRIVS` P JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
-                      JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
-                      LEFT JOIN `sys`.`TBL_PRIVS` P2 ON (P.`TBL_ID` = P2.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR
-  (P2.`TBL_ID` IS NOT NULL AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
-  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
-  AND P2.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER` = current_authorizer() AND P2.`AUTHORIZER` = current_authorizer())
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@dbs
-PREHOOK: Input: sys@tbl_privs
-PREHOOK: Input: sys@tbls
-PREHOOK: Output: INFORMATION_SCHEMA@TABLE_PRIVILEGES
-PREHOOK: Output: database:information_schema
-POSTHOOK: query: CREATE OR REPLACE VIEW `TABLE_PRIVILEGES`
-(
-  `GRANTOR`,
-  `GRANTEE`,
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `PRIVILEGE_TYPE`,
-  `IS_GRANTABLE`,
-  `WITH_HIERARCHY`
-) AS
-SELECT DISTINCT
-  P.`GRANTOR`,
-  P.`PRINCIPAL_NAME`,
-  'default',
-  D.`NAME`,
-  T.`TBL_NAME`,
-  P.`TBL_PRIV`,
-  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'),
-  'NO'
-FROM
-  `sys`.`TBL_PRIVS` P JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
-                      JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
-                      LEFT JOIN `sys`.`TBL_PRIVS` P2 ON (P.`TBL_ID` = P2.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR
-  (P2.`TBL_ID` IS NOT NULL AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
-  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
-  AND P2.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER` = current_authorizer() AND P2.`AUTHORIZER` = current_authorizer())
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@dbs
-POSTHOOK: Input: sys@tbl_privs
-POSTHOOK: Input: sys@tbls
-POSTHOOK: Output: INFORMATION_SCHEMA@TABLE_PRIVILEGES
-POSTHOOK: Output: database:information_schema
-POSTHOOK: Lineage: TABLE_PRIVILEGES.grantee SIMPLE [(tbl_privs)p.FieldSchema(name:principal_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_PRIVILEGES.grantor SIMPLE [(tbl_privs)p.FieldSchema(name:grantor, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_PRIVILEGES.is_grantable EXPRESSION [(tbl_privs)p.FieldSchema(name:grant_option, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_PRIVILEGES.privilege_type SIMPLE [(tbl_privs)p.FieldSchema(name:tbl_priv, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_PRIVILEGES.table_catalog SIMPLE []
-POSTHOOK: Lineage: TABLE_PRIVILEGES.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_PRIVILEGES.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: TABLE_PRIVILEGES.with_hierarchy SIMPLE []
-PREHOOK: query: CREATE OR REPLACE VIEW `COLUMNS`
-(
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `COLUMN_NAME`,
-  `ORDINAL_POSITION`,
-  `COLUMN_DEFAULT`,
-  `IS_NULLABLE`,
-  `DATA_TYPE`,
-  `CHARACTER_MAXIMUM_LENGTH`,
-  `CHARACTER_OCTET_LENGTH`,
-  `NUMERIC_PRECISION`,
-  `NUMERIC_PRECISION_RADIX`,
-  `NUMERIC_SCALE`,
-  `DATETIME_PRECISION`,
-  `INTERVAL_TYPE`,
-  `INTERVAL_PRECISION`,
-  `CHARACTER_SET_CATALOG`,
-  `CHARACTER_SET_SCHEMA`,
-  `CHARACTER_SET_NAME`,
-  `COLLATION_CATALOG`,
-  `COLLATION_SCHEMA`,
-  `COLLATION_NAME`,
-  `UDT_CATALOG`,
-  `UDT_SCHEMA`,
-  `UDT_NAME`,
-  `SCOPE_CATALOG`,
-  `SCOPE_SCHEMA`,
-  `SCOPE_NAME`,
-  `MAXIMUM_CARDINALITY`,
-  `DTD_IDENTIFIER`,
-  `IS_SELF_REFERENCING`,
-  `IS_IDENTITY`,
-  `IDENTITY_GENERATION`,
-  `IDENTITY_START`,
-  `IDENTITY_INCREMENT`,
-  `IDENTITY_MAXIMUM`,
-  `IDENTITY_MINIMUM`,
-  `IDENTITY_CYCLE`,
-  `IS_GENERATED`,
-  `GENERATION_EXPRESSION`,
-  `IS_SYSTEM_TIME_PERIOD_START`,
-  `IS_SYSTEM_TIME_PERIOD_END`,
-  `SYSTEM_TIME_PERIOD_TIMESTAMP_GENERATION`,
-  `IS_UPDATABLE`,
-  `DECLARED_DATA_TYPE`,
-  `DECLARED_NUMERIC_PRECISION`,
-  `DECLARED_NUMERIC_SCALE`
-) AS
-SELECT DISTINCT
-  'default',
-  D.NAME,
-  T.TBL_NAME,
-  C.COLUMN_NAME,
-  C.INTEGER_IDX,
-  cast (null as string),
-  'YES',
-  C.TYPE_NAME as TYPE_NAME,
-  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
-       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
-       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
-       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
-       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
-       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
-       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
-       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
-       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
-       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+),(\\d+)',2)
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+),(\\d+)',2)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) = 'date' THEN 0
-       WHEN lower(C.TYPE_NAME) = 'timestamp' THEN 9
-       ELSE null END,
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  C.CD_ID,
-  'NO',
-  'NO',
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  'NEVER',
-  cast (null as string),
-  'NO',
-  'NO',
-  cast (null as string),
-  'YES',
-  C.TYPE_NAME as DECLARED_DATA_TYPE,
-  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
-       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
-       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
-       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
-       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
-       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
-       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
-       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
-       ELSE null END
-FROM
-  `sys`.`COLUMNS_V2` C JOIN `sys`.`SDS` S ON (C.`CD_ID` = S.`CD_ID`)
-                       JOIN `sys`.`TBLS` T ON (S.`SD_ID` = T.`SD_ID`)
-                       JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
-                       LEFT JOIN `sys`.`TBL_COL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
-  AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
-  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_COL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer()
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@columns_v2
-PREHOOK: Input: sys@dbs
-PREHOOK: Input: sys@sds
-PREHOOK: Input: sys@tbl_col_privs
-PREHOOK: Input: sys@tbls
-PREHOOK: Output: INFORMATION_SCHEMA@COLUMNS
-PREHOOK: Output: database:information_schema
-POSTHOOK: query: CREATE OR REPLACE VIEW `COLUMNS`
-(
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `COLUMN_NAME`,
-  `ORDINAL_POSITION`,
-  `COLUMN_DEFAULT`,
-  `IS_NULLABLE`,
-  `DATA_TYPE`,
-  `CHARACTER_MAXIMUM_LENGTH`,
-  `CHARACTER_OCTET_LENGTH`,
-  `NUMERIC_PRECISION`,
-  `NUMERIC_PRECISION_RADIX`,
-  `NUMERIC_SCALE`,
-  `DATETIME_PRECISION`,
-  `INTERVAL_TYPE`,
-  `INTERVAL_PRECISION`,
-  `CHARACTER_SET_CATALOG`,
-  `CHARACTER_SET_SCHEMA`,
-  `CHARACTER_SET_NAME`,
-  `COLLATION_CATALOG`,
-  `COLLATION_SCHEMA`,
-  `COLLATION_NAME`,
-  `UDT_CATALOG`,
-  `UDT_SCHEMA`,
-  `UDT_NAME`,
-  `SCOPE_CATALOG`,
-  `SCOPE_SCHEMA`,
-  `SCOPE_NAME`,
-  `MAXIMUM_CARDINALITY`,
-  `DTD_IDENTIFIER`,
-  `IS_SELF_REFERENCING`,
-  `IS_IDENTITY`,
-  `IDENTITY_GENERATION`,
-  `IDENTITY_START`,
-  `IDENTITY_INCREMENT`,
-  `IDENTITY_MAXIMUM`,
-  `IDENTITY_MINIMUM`,
-  `IDENTITY_CYCLE`,
-  `IS_GENERATED`,
-  `GENERATION_EXPRESSION`,
-  `IS_SYSTEM_TIME_PERIOD_START`,
-  `IS_SYSTEM_TIME_PERIOD_END`,
-  `SYSTEM_TIME_PERIOD_TIMESTAMP_GENERATION`,
-  `IS_UPDATABLE`,
-  `DECLARED_DATA_TYPE`,
-  `DECLARED_NUMERIC_PRECISION`,
-  `DECLARED_NUMERIC_SCALE`
-) AS
-SELECT DISTINCT
-  'default',
-  D.NAME,
-  T.TBL_NAME,
-  C.COLUMN_NAME,
-  C.INTEGER_IDX,
-  cast (null as string),
-  'YES',
-  C.TYPE_NAME as TYPE_NAME,
-  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
-       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
-       WHEN lower(C.TYPE_NAME) like 'char%'    THEN cast(regexp_extract(upper(C.TYPE_NAME),    '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
-       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
-       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
-       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
-       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
-       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
-       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
-       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+),(\\d+)',2)
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+),(\\d+)',2)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) = 'date' THEN 0
-       WHEN lower(C.TYPE_NAME) = 'timestamp' THEN 9
-       ELSE null END,
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  C.CD_ID,
-  'NO',
-  'NO',
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  cast (null as string),
-  'NEVER',
-  cast (null as string),
-  'NO',
-  'NO',
-  cast (null as string),
-  'YES',
-  C.TYPE_NAME as DECLARED_DATA_TYPE,
-  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
-       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
-       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
-       WHEN lower(C.TYPE_NAME) = 'float' THEN 23
-       WHEN lower(C.TYPE_NAME) = 'double' THEN 53
-       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
-       ELSE null END,
-  CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'int' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
-       WHEN lower(C.TYPE_NAME) = 'float' THEN 2
-       WHEN lower(C.TYPE_NAME) = 'double' THEN 2
-       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
-       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
-       ELSE null END
-FROM
-  `sys`.`COLUMNS_V2` C JOIN `sys`.`SDS` S ON (C.`CD_ID` = S.`CD_ID`)
-                       JOIN `sys`.`TBLS` T ON (S.`SD_ID` = T.`SD_ID`)
-                       JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
-                       LEFT JOIN `sys`.`TBL_COL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
-  AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
-  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_COL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer()
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@columns_v2
-POSTHOOK: Input: sys@dbs
-POSTHOOK: Input: sys@sds
-POSTHOOK: Input: sys@tbl_col_privs
-POSTHOOK: Input: sys@tbls
-POSTHOOK: Output: INFORMATION_SCHEMA@COLUMNS
-POSTHOOK: Output: database:information_schema
-POSTHOOK: Lineage: COLUMNS.character_maximum_length EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.character_octet_length EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.character_set_catalog EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.character_set_name EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.character_set_schema EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.collation_catalog EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.collation_name EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.collation_schema EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.column_default EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.column_name SIMPLE [(columns_v2)c.FieldSchema(name:column_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.data_type SIMPLE [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.datetime_precision EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.declared_data_type SIMPLE [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.declared_numeric_precision EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.declared_numeric_scale EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.dtd_identifier SIMPLE [(columns_v2)c.FieldSchema(name:cd_id, type:bigint, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.generation_expression EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.identity_cycle EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.identity_generation EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.identity_increment EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.identity_maximum EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.identity_minimum EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.identity_start EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.interval_precision EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.interval_type EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.is_generated SIMPLE []
-POSTHOOK: Lineage: COLUMNS.is_identity SIMPLE []
-POSTHOOK: Lineage: COLUMNS.is_nullable SIMPLE []
-POSTHOOK: Lineage: COLUMNS.is_self_referencing SIMPLE []
-POSTHOOK: Lineage: COLUMNS.is_system_time_period_end SIMPLE []
-POSTHOOK: Lineage: COLUMNS.is_system_time_period_start SIMPLE []
-POSTHOOK: Lineage: COLUMNS.is_updatable SIMPLE []
-POSTHOOK: Lineage: COLUMNS.maximum_cardinality EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.numeric_precision EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.numeric_precision_radix EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.numeric_scale EXPRESSION [(columns_v2)c.FieldSchema(name:type_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.ordinal_position SIMPLE [(columns_v2)c.FieldSchema(name:integer_idx, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.scope_catalog EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.scope_name EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.scope_schema EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.system_time_period_timestamp_generation EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.table_catalog SIMPLE []
-POSTHOOK: Lineage: COLUMNS.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMNS.udt_catalog EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.udt_name EXPRESSION []
-POSTHOOK: Lineage: COLUMNS.udt_schema EXPRESSION []
-PREHOOK: query: CREATE OR REPLACE VIEW `COLUMN_PRIVILEGES`
-(
-  `GRANTOR`,
-  `GRANTEE`,
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `COLUMN_NAME`,
-  `PRIVILEGE_TYPE`,
-  `IS_GRANTABLE`
-) AS
-SELECT DISTINCT
-  P.`GRANTOR`,
-  P.`PRINCIPAL_NAME`,
-  'default',
-  D.`NAME`,
-  T.`TBL_NAME`,
-  P.`COLUMN_NAME`,
-  P.`TBL_COL_PRIV`,
-  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
-FROM
-  `sys`.`TBL_COL_PRIVS` P JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
-                          JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
-                          JOIN `sys`.`SDS` S ON (S.`SD_ID` = T.`SD_ID`)
-                          LEFT JOIN `sys`.`TBL_PRIVS` P2 ON (P.`TBL_ID` = P2.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR P2.`TBL_ID` IS NOT NULL
-  AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
-  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
-  AND P2.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer() AND P2.`AUTHORIZER`=current_authorizer()
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@dbs
-PREHOOK: Input: sys@sds
-PREHOOK: Input: sys@tbl_col_privs
-PREHOOK: Input: sys@tbl_privs
-PREHOOK: Input: sys@tbls
-PREHOOK: Output: INFORMATION_SCHEMA@COLUMN_PRIVILEGES
-PREHOOK: Output: database:information_schema
-POSTHOOK: query: CREATE OR REPLACE VIEW `COLUMN_PRIVILEGES`
-(
-  `GRANTOR`,
-  `GRANTEE`,
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `COLUMN_NAME`,
-  `PRIVILEGE_TYPE`,
-  `IS_GRANTABLE`
-) AS
-SELECT DISTINCT
-  P.`GRANTOR`,
-  P.`PRINCIPAL_NAME`,
-  'default',
-  D.`NAME`,
-  T.`TBL_NAME`,
-  P.`COLUMN_NAME`,
-  P.`TBL_COL_PRIV`,
-  IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
-FROM
-  `sys`.`TBL_COL_PRIVS` P JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
-                          JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
-                          JOIN `sys`.`SDS` S ON (S.`SD_ID` = T.`SD_ID`)
-                          LEFT JOIN `sys`.`TBL_PRIVS` P2 ON (P.`TBL_ID` = P2.`TBL_ID`)
-WHERE
-  NOT restrict_information_schema() OR P2.`TBL_ID` IS NOT NULL
-  AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
-  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
-  AND P2.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer() AND P2.`AUTHORIZER`=current_authorizer()
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@dbs
-POSTHOOK: Input: sys@sds
-POSTHOOK: Input: sys@tbl_col_privs
-POSTHOOK: Input: sys@tbl_privs
-POSTHOOK: Input: sys@tbls
-POSTHOOK: Output: INFORMATION_SCHEMA@COLUMN_PRIVILEGES
-POSTHOOK: Output: database:information_schema
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.column_name SIMPLE [(tbl_col_privs)p.FieldSchema(name:column_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.grantee SIMPLE [(tbl_col_privs)p.FieldSchema(name:principal_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.grantor SIMPLE [(tbl_col_privs)p.FieldSchema(name:grantor, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.is_grantable EXPRESSION [(tbl_col_privs)p.FieldSchema(name:grant_option, type:int, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.privilege_type SIMPLE [(tbl_col_privs)p.FieldSchema(name:tbl_col_priv, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.table_catalog SIMPLE []
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
-PREHOOK: query: CREATE OR REPLACE VIEW `VIEWS`
-(
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `VIEW_DEFINITION`,
-  `CHECK_OPTION`,
-  `IS_UPDATABLE`,
-  `IS_INSERTABLE_INTO`,
-  `IS_TRIGGER_UPDATABLE`,
-  `IS_TRIGGER_DELETABLE`,
-  `IS_TRIGGER_INSERTABLE_INTO`
-) AS
-SELECT DISTINCT
-  'default',
-  D.NAME,
-  T.TBL_NAME,
-  T.VIEW_ORIGINAL_TEXT,
-  CAST(NULL as string),
-  false,
-  false,
-  false,
-  false,
-  false
-FROM
-  `sys`.`DBS` D JOIN `sys`.`TBLS` T ON (D.`DB_ID` = T.`DB_ID`)
-                LEFT JOIN `sys`.`TBL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
-WHERE
-  length(T.VIEW_ORIGINAL_TEXT) > 0
-  AND (NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
-  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer())
-PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@dbs
-PREHOOK: Input: sys@tbl_privs
-PREHOOK: Input: sys@tbls
-PREHOOK: Output: INFORMATION_SCHEMA@VIEWS
-PREHOOK: Output: database:information_schema
-POSTHOOK: query: CREATE OR REPLACE VIEW `VIEWS`
-(
-  `TABLE_CATALOG`,
-  `TABLE_SCHEMA`,
-  `TABLE_NAME`,
-  `VIEW_DEFINITION`,
-  `CHECK_OPTION`,
-  `IS_UPDATABLE`,
-  `IS_INSERTABLE_INTO`,
-  `IS_TRIGGER_UPDATABLE`,
-  `IS_TRIGGER_DELETABLE`,
-  `IS_TRIGGER_INSERTABLE_INTO`
-) AS
-SELECT DISTINCT
-  'default',
-  D.NAME,
-  T.TBL_NAME,
-  T.VIEW_ORIGINAL_TEXT,
-  CAST(NULL as string),
-  false,
-  false,
-  false,
-  false,
-  false
-FROM
-  `sys`.`DBS` D JOIN `sys`.`TBLS` T ON (D.`DB_ID` = T.`DB_ID`)
-                LEFT JOIN `sys`.`TBL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
-WHERE
-  length(T.VIEW_ORIGINAL_TEXT) > 0
-  AND (NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
-  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer())
-POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@dbs
-POSTHOOK: Input: sys@tbl_privs
-POSTHOOK: Input: sys@tbls
-POSTHOOK: Output: INFORMATION_SCHEMA@VIEWS
-POSTHOOK: Output: database:information_schema
-POSTHOOK: Lineage: VIEWS.check_option EXPRESSION []
-POSTHOOK: Lineage: VIEWS.is_insertable_into SIMPLE []
-POSTHOOK: Lineage: VIEWS.is_trigger_deletable SIMPLE []
-POSTHOOK: Lineage: VIEWS.is_trigger_insertable_into SIMPLE []
-POSTHOOK: Lineage: VIEWS.is_trigger_updatable SIMPLE []
-POSTHOOK: Lineage: VIEWS.is_updatable SIMPLE []
-POSTHOOK: Lineage: VIEWS.table_catalog SIMPLE []
-POSTHOOK: Lineage: VIEWS.table_name SIMPLE [(tbls)t.FieldSchema(name:tbl_name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: VIEWS.table_schema SIMPLE [(dbs)d.FieldSchema(name:name, type:string, comment:from deserializer), ]
-POSTHOOK: Lineage: VIEWS.view_definition SIMPLE [(tbls)t.FieldSchema(name:view_original_text, type:string, comment:from deserializer), ]
-PREHOOK: query: CREATE OR REPLACE VIEW `COMPACTIONS`
-(
-  `C_ID`,
-  `C_CATALOG`,
-  `C_DATABASE`,
-  `C_TABLE`,
-  `C_PARTITION`,
-  `C_TYPE`,
-  `C_STATE`,
-  `C_HOSTNAME`,
-  `C_WORKER_ID`,
-  `C_START`,
-  `C_DURATION`,
-  `C_HADOOP_JOB_ID`,
-  `C_RUN_AS`,
-  `C_HIGHEST_WRITE_ID`
-) AS
-SELECT DISTINCT
-  C_ID,
-  C_CATALOG,
-  C_DATABASE,
-  C_TABLE,
... 1963 lines suppressed ...