Posted to commits@hive.apache.org by se...@apache.org on 2017/11/27 22:28:06 UTC

[12/13] hive git commit: HIVE-17954 : Implement pool, user, group and trigger to pool management API's (Harish Jaiprakash, reviewed by Sergey Shelukhin)

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java
index b6298da..43e6e33 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java
@@ -20,92 +20,51 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 
-import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-@Explain(displayName = "Alter Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+@Explain(displayName = "Alter Resource plans",
+    explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class AlterResourcePlanDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = -3514685833183437279L;
 
+  private WMResourcePlan resourcePlan;
   private String rpName;
-  private String newName;
-  private Integer queryParallelism;
-  private WMResourcePlanStatus status;
   private boolean validate;
-  private String defaultPoolPath;
   private boolean isEnableActivate;
 
   public AlterResourcePlanDesc() {}
 
-  private AlterResourcePlanDesc(String rpName, String newName, Integer queryParallelism,
-      WMResourcePlanStatus status, boolean validate, String defaultPoolPath) {
+  public AlterResourcePlanDesc(WMResourcePlan resourcePlan, String rpName, boolean validate,
+      boolean isEnableActivate) {
+    this.resourcePlan = resourcePlan;
     this.rpName = rpName;
-    this.newName = newName;
-    this.queryParallelism = queryParallelism;
-    this.status = status;
     this.validate = validate;
-    this.defaultPoolPath = defaultPoolPath;
+    this.isEnableActivate = isEnableActivate;
   }
 
-  public static AlterResourcePlanDesc createSet(String rpName) {
-    return new AlterResourcePlanDesc(rpName, null, null, null, false, null);
+  @Explain(displayName="resourcePlan",
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public WMResourcePlan getResourcePlan() {
+    return resourcePlan;
   }
 
-  public static AlterResourcePlanDesc createChangeStatus(
-      String rpName, WMResourcePlanStatus status) {
-    return new AlterResourcePlanDesc(rpName, null, null, status, false, null);
+  public void setResourcePlan(WMResourcePlan resourcePlan) {
+    this.resourcePlan = resourcePlan;
   }
 
-  public static AlterResourcePlanDesc createValidatePlan(String rpName) {
-    return new AlterResourcePlanDesc(rpName, null, null, null, true, null);
-  }
-
-  @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getRpName() {
+  @Explain(displayName="resourcePlanName",
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
     return rpName;
   }
 
-  public void setRpName(String rpName) {
+  public void setResourcePlanName(String rpName) {
     this.rpName = rpName;
   }
 
-  @Explain(displayName="newResourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getNewName() {
-    return newName;
-  }
-
-  public void setNewName(String newName) {
-    this.newName = newName;
-  }
-
-  @Explain(displayName="Default pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getDefaultPoolPath() {
-    return defaultPoolPath;
-  }
-
-  public void setDefaultPoolPath(String defaultPoolPath) {
-    this.defaultPoolPath = defaultPoolPath;
-  }
-
-  @Explain(displayName="queryParallelism", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public Integer getQueryParallelism() {
-    return queryParallelism;
-  }
-
-  public void setQueryParallelism(Integer queryParallelism) {
-    this.queryParallelism = queryParallelism;
-  }
-
-  @Explain(displayName="status", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public WMResourcePlanStatus getStatus() {
-    return status;
-  }
-
-  public void setStatus(WMResourcePlanStatus status) {
-    this.status = status;
-  }
-
-  @Explain(displayName="shouldValidate", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  @Explain(displayName="shouldValidate",
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public boolean shouldValidate() {
     return validate;
   }
@@ -114,11 +73,11 @@ public class AlterResourcePlanDesc extends DDLDesc implements Serializable {
     this.validate = validate;
   }
 
-  public void setIsEnableActivate(boolean b) {
-    this.isEnableActivate = b;
-  }
-
   public boolean isEnableActivate() {
     return isEnableActivate;
   }
+
+  public void setIsEnableActivate(boolean b) {
+    this.isEnableActivate = b;
+  }
 }
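
Note on the API change above: the per-field setters (newName, queryParallelism, status, defaultPoolPath) are gone, and the alterations now travel inside a WMResourcePlan. A minimal sketch of how a caller might build the new desc, using only calls that appear elsewhere in this diff (the WMResourcePlan name constructor and setQueryParallelism); this is illustrative, not code from the commit:

    // Sketch: ALTER RESOURCE PLAN plan_2 SET QUERY_PARALLELISM = 10
    static AlterResourcePlanDesc alterParallelism(String planName, int parallelism) {
      WMResourcePlan changes = new WMResourcePlan(planName);
      changes.setQueryParallelism(parallelism);
      // validate = false, isEnableActivate = false for a plain SET
      return new AlterResourcePlanDesc(changes, planName, false, false);
    }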

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterWMTriggerDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterWMTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterWMTriggerDesc.java
index 94414ef..11d448b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterWMTriggerDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterWMTriggerDesc.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 @Explain(displayName="Alter WM Trigger",
@@ -27,58 +28,21 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class AlterWMTriggerDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = -2105736261687539210L;
 
-  private String rpName;
-  private String triggerName;
-  private String triggerExpression;
-  private String actionExpression;
+  private WMTrigger trigger;
 
   public AlterWMTriggerDesc() {}
 
-  public AlterWMTriggerDesc(String rpName, String triggerName, String triggerExpression,
-      String actionExpression) {
-    this.rpName = rpName;
-    this.triggerName = triggerName;
-    this.triggerExpression = triggerExpression;
-    this.actionExpression = actionExpression;
+  public AlterWMTriggerDesc(WMTrigger trigger) {
+    this.trigger = trigger;
   }
 
-  @Explain(displayName="resourcePlanName",
+  @Explain(displayName="trigger",
       explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getRpName() {
-    return rpName;
+  public WMTrigger getTrigger() {
+    return trigger;
   }
 
-  public void setRpName(String rpName) {
-    this.rpName = rpName;
-  }
-
-  @Explain(displayName="triggerName",
-      explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getTriggerName() {
-    return triggerName;
-  }
-
-  public void setTriggerName(String triggerName) {
-    this.triggerName = triggerName;
-  }
-
-  @Explain(displayName="triggerExpression",
-      explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getTriggerExpression() {
-    return triggerExpression;
-  }
-
-  public void setTriggerExpression(String triggerExpression) {
-    this.triggerExpression = triggerExpression;
-  }
-
-  @Explain(displayName="actionExpression",
-      explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getActionExpression() {
-    return actionExpression;
-  }
-
-  public void setActionExpression(String actionExpression) {
-    this.actionExpression = actionExpression;
+  public void setTrigger(WMTrigger trigger) {
+    this.trigger = trigger;
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrAlterWMMappingDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrAlterWMMappingDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrAlterWMMappingDesc.java
new file mode 100644
index 0000000..3d5c4a5
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrAlterWMMappingDesc.java
@@ -0,0 +1,41 @@
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.metastore.api.WMMapping;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+@Explain(displayName = "Create/Alter Mapping",
+    explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class CreateOrAlterWMMappingDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = -442968568922083053L;
+
+  private WMMapping mapping;
+  private boolean update;
+
+  public CreateOrAlterWMMappingDesc() {}
+
+  public CreateOrAlterWMMappingDesc(WMMapping mapping, boolean update) {
+    this.mapping = mapping;
+    this.update = update;
+  }
+
+  @Explain(displayName = "mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public WMMapping getMapping() {
+    return mapping;
+  }
+
+  public void setMapping(WMMapping mapping) {
+    this.mapping = mapping;
+  }
+
+  @Explain(displayName = "update",
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isUpdate() {
+    return update;
+  }
+
+  public void setUpdate(boolean update) {
+    this.update = update;
+  }
+}
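
The new mapping desc wraps the Thrift WMMapping directly. A sketch of building a user mapping, mirroring the WMMapping usage in the TestWorkloadManager hunk further down in this diff; the "USER" entity-type literal is an assumption, not taken from this diff:

    // Sketch: CREATE USER MAPPING "user1" IN plan_2 TO def WITH ORDER 1
    static CreateOrAlterWMMappingDesc userMapping(String rpName, String user, String poolPath) {
      WMMapping mapping = new WMMapping(rpName, "USER", user); // "USER" literal is an assumption
      mapping.setPoolPath(poolPath);
      mapping.setOrdering(1);
      return new CreateOrAlterWMMappingDesc(mapping, /* update (ALTER) */ false);
    }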

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrAlterWMPoolDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrAlterWMPoolDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrAlterWMPoolDesc.java
new file mode 100644
index 0000000..b6aa3f1
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrAlterWMPoolDesc.java
@@ -0,0 +1,50 @@
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.metastore.api.WMPool;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+@Explain(displayName = "Create/Alter Pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class CreateOrAlterWMPoolDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 4872940135771213510L;
+
+  private WMPool pool;
+  private String poolPath;
+  private boolean update;
+
+  public CreateOrAlterWMPoolDesc() {}
+
+  public CreateOrAlterWMPoolDesc(WMPool pool, String poolPath, boolean update) {
+    this.pool = pool;
+    this.poolPath = poolPath;
+    this.update = update;
+  }
+
+  @Explain(displayName="pool", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public WMPool getPool() {
+    return pool;
+  }
+
+  public void setPool(WMPool pool) {
+    this.pool = pool;
+  }
+
+  @Explain(displayName="poolPath", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getPoolPath() {
+    return poolPath;
+  }
+
+  public void setPoolPath(String poolPath) {
+    this.poolPath = poolPath;
+  }
+
+  @Explain(displayName="isUpdate", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isUpdate() {
+    return update;
+  }
+
+  public void setUpdate(boolean update) {
+    this.update = update;
+  }
+}
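
Likewise for pools: the desc carries a Thrift WMPool plus the path of the pool being addressed and an update flag. A sketch under the assumption that WMPool exposes the usual Thrift setters for the columns surfaced in SYS.WM_POOLS (resource plan name, path, alloc fraction, query parallelism); the setter names are assumptions, not taken from this diff:

    // Sketch: CREATE POOL plan_2.default.c1 WITH ALLOC_FRACTION=0.3, QUERY_PARALLELISM=3
    static CreateOrAlterWMPoolDesc createPool(String rpName, String path) {
      WMPool pool = new WMPool();
      pool.setResourcePlanName(rpName);  // assumed Thrift setter
      pool.setPoolPath(path);            // assumed Thrift setter
      pool.setAllocFraction(0.3);        // assumed Thrift setter
      pool.setQueryParallelism(3);       // assumed Thrift setter
      // The separate poolPath argument presumably identifies the existing pool on the
      // ALTER path; update = false here for CREATE.
      return new CreateOrAlterWMPoolDesc(pool, path, false);
    }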

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrDropTriggerToPoolMappingDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrDropTriggerToPoolMappingDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrDropTriggerToPoolMappingDesc.java
new file mode 100644
index 0000000..e1f912f
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateOrDropTriggerToPoolMappingDesc.java
@@ -0,0 +1,66 @@
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+@Explain(displayName = "Create/Drop Trigger to pool mappings",
+    explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class CreateOrDropTriggerToPoolMappingDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 383046258694558029L;
+
+  private String resourcePlanName;
+  private String triggerName;
+  private String poolPath;
+  private boolean drop;
+
+  public CreateOrDropTriggerToPoolMappingDesc() {}
+
+  public CreateOrDropTriggerToPoolMappingDesc(String resourcePlanName, String triggerName,
+      String poolPath, boolean drop) {
+    this.resourcePlanName = resourcePlanName;
+    this.triggerName = triggerName;
+    this.poolPath = poolPath;
+    this.drop = drop;
+  }
+
+  @Explain(displayName = "resourcePlanName",
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  public void setResourcePlanName(String resourcePlanName) {
+    this.resourcePlanName = resourcePlanName;
+  }
+
+  @Explain(displayName = "triggerName",
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getTriggerName() {
+    return triggerName;
+  }
+
+  public void setTriggerName(String triggerName) {
+    this.triggerName = triggerName;
+  }
+
+  @Explain(displayName = "poolPath",
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getPoolPath() {
+    return poolPath;
+  }
+
+  public void setPoolPath(String poolPath) {
+    this.poolPath = poolPath;
+  }
+
+  @Explain(displayName = "drop or create",
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean shouldDrop() {
+    return drop;
+  }
+
+  public void setDrop(boolean drop) {
+    this.drop = drop;
+  }
+}
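
Trigger-to-pool mappings stay string-based rather than wrapping a Thrift object; the same desc serves both ALTER POOL ... ADD TRIGGER and ... DROP TRIGGER via the drop flag. A sketch using names from resourceplan.q below; illustrative only:

    // drop = false: ALTER POOL plan_2.def.c1 ADD TRIGGER trigger_1
    // drop = true:  ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_1
    static CreateOrDropTriggerToPoolMappingDesc triggerMapping(boolean drop) {
      return new CreateOrDropTriggerToPoolMappingDesc("plan_2", "trigger_1", "def.c1", drop);
    }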

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
index 348e315..efdd05c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
@@ -20,32 +20,28 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 @Explain(displayName = "Create ResourcePlan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
 public class CreateResourcePlanDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = -3492803425541479414L;
 
-  private static final long serialVersionUID = -3649343104271794404L;
-
-  private String planName;
-  private Integer queryParallelism;
+  private WMResourcePlan resourcePlan;
 
   // For serialization only.
   public CreateResourcePlanDesc() {
   }
 
   public CreateResourcePlanDesc(String planName, Integer queryParallelism) {
-    this.planName = planName;
-    this.queryParallelism = queryParallelism;
-  }
-
-  @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getName() {
-    return planName;
+    resourcePlan = new WMResourcePlan(planName);
+    if (queryParallelism != null) {
+      resourcePlan.setQueryParallelism(queryParallelism);
+    }
   }
 
-  @Explain(displayName="queryParallelism")
-  public Integer getQueryParallelism() {
-    return queryParallelism;
+  @Explain(displayName="resourcePlan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public WMResourcePlan getResourcePlan() {
+    return resourcePlan;
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateWMTriggerDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateWMTriggerDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateWMTriggerDesc.java
index 92eaefd..c1dcb3c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateWMTriggerDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateWMTriggerDesc.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
 
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 @Explain(displayName="Create WM Trigger",
@@ -27,58 +28,21 @@ import org.apache.hadoop.hive.ql.plan.Explain.Level;
 public class CreateWMTriggerDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1705317739121300923L;
 
-  private String rpName;
-  private String triggerName;
-  private String triggerExpression;
-  private String actionExpression;
+  private WMTrigger trigger;
 
   public CreateWMTriggerDesc() {}
 
-  public CreateWMTriggerDesc(String rpName, String triggerName, String triggerExpression,
-      String actionExpression) {
-    this.rpName = rpName;
-    this.triggerName = triggerName;
-    this.triggerExpression = triggerExpression;
-    this.actionExpression = actionExpression;
+  public CreateWMTriggerDesc(WMTrigger trigger) {
+    this.trigger = trigger;
   }
 
-  @Explain(displayName="resourcePlanName",
+  @Explain(displayName="trigger",
       explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getRpName() {
-    return rpName;
+  public WMTrigger getTrigger() {
+    return trigger;
   }
 
-  public void setRpName(String rpName) {
-    this.rpName = rpName;
-  }
-
-  @Explain(displayName="triggerName",
-      explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getTriggerName() {
-    return triggerName;
-  }
-
-  public void setTriggerName(String triggerName) {
-    this.triggerName = triggerName;
-  }
-
-  @Explain(displayName="triggerExpression",
-      explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getTriggerExpression() {
-    return triggerExpression;
-  }
-
-  public void setTriggerExpression(String triggerExpression) {
-    this.triggerExpression = triggerExpression;
-  }
-
-  @Explain(displayName="actionExpression",
-      explainLevels={ Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public String getActionExpression() {
-    return actionExpression;
-  }
-
-  public void setActionExpression(String actionExpression) {
-    this.actionExpression = actionExpression;
+  public void setTrigger(WMTrigger trigger) {
+    this.trigger = trigger;
   }
 }
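
As with AlterWMTriggerDesc above, the four string fields now live on the Thrift WMTrigger. A sketch of building the desc; the WMTrigger setter names are assumptions mirroring the removed fields, and the expressions are copied from resourceplan.q below:

    // Sketch: CREATE TRIGGER `table`.`trigger1` WHEN ELAPSED_TIME > 10 DO KILL
    static CreateWMTriggerDesc killTrigger(String rpName, String triggerName) {
      WMTrigger trigger = new WMTrigger();
      trigger.setResourcePlanName(rpName);     // assumed Thrift setter
      trigger.setTriggerName(triggerName);     // assumed Thrift setter
      trigger.setTriggerExpression("ELAPSED_TIME > 10");
      trigger.setActionExpression("KILL");
      return new CreateWMTriggerDesc(trigger);
    }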

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index a9b39be..eb19ab0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -96,6 +96,14 @@ public class DDLWork implements Serializable {
   private AlterWMTriggerDesc alterWMTriggerDesc;
   private DropWMTriggerDesc dropWMTriggerDesc;
 
+  private CreateOrAlterWMPoolDesc wmPoolDesc;
+  private DropWMPoolDesc dropWMPoolDesc;
+
+  private CreateOrAlterWMMappingDesc wmMappingDesc;
+  private DropWMMappingDesc dropWMMappingDesc;
+
+  private CreateOrDropTriggerToPoolMappingDesc triggerToPoolMappingDesc;
+
   boolean needLock = false;
 
   /**
@@ -582,31 +590,61 @@ public class DDLWork implements Serializable {
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       DropResourcePlanDesc dropResourcePlanDesc) {
     this(inputs, outputs);
-    this.setDropResourcePlanDesc(dropResourcePlanDesc);
+    this.dropResourcePlanDesc = dropResourcePlanDesc;
   }
 
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       AlterResourcePlanDesc alterResourcePlanDesc) {
     this(inputs, outputs);
-    this.setAlterResourcePlanDesc(alterResourcePlanDesc);
+    this.alterResourcePlanDesc = alterResourcePlanDesc;
   }
 
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       CreateWMTriggerDesc createWMTriggerDesc) {
     this(inputs, outputs);
-    this.setCreateWMTriggerDesc(createWMTriggerDesc);
+    this.createWMTriggerDesc = createWMTriggerDesc;
   }
 
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       AlterWMTriggerDesc alterWMTriggerDesc) {
     this(inputs, outputs);
-    this.setAlterWMTriggerDesc(alterWMTriggerDesc);
+    this.alterWMTriggerDesc = alterWMTriggerDesc;
   }
 
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       DropWMTriggerDesc dropWMTriggerDesc) {
     this(inputs, outputs);
-    this.setDropWMTriggerDesc(dropWMTriggerDesc);
+    this.dropWMTriggerDesc = dropWMTriggerDesc;
+  }
+
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      CreateOrAlterWMPoolDesc wmPoolDesc) {
+    this(inputs, outputs);
+    this.wmPoolDesc = wmPoolDesc;
+  }
+
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      DropWMPoolDesc dropWMPoolDesc) {
+    this(inputs, outputs);
+    this.dropWMPoolDesc = dropWMPoolDesc;
+  }
+
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      CreateOrAlterWMMappingDesc wmMappingDesc) {
+    this(inputs, outputs);
+    this.wmMappingDesc = wmMappingDesc;
+  }
+
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      DropWMMappingDesc dropWMMappingDesc) {
+    this(inputs, outputs);
+    this.dropWMMappingDesc = dropWMMappingDesc;
+  }
+
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      CreateOrDropTriggerToPoolMappingDesc triggerToPoolMappingDesc) {
+    this(inputs, outputs);
+    this.triggerToPoolMappingDesc = triggerToPoolMappingDesc;
   }
 
   /**
@@ -1363,4 +1401,44 @@ public class DDLWork implements Serializable {
   public void setDropWMTriggerDesc(DropWMTriggerDesc dropWMTriggerDesc) {
     this.dropWMTriggerDesc = dropWMTriggerDesc;
   }
+
+  public CreateOrAlterWMPoolDesc getWmPoolDesc() {
+    return wmPoolDesc;
+  }
+
+  public void setWmPoolDesc(CreateOrAlterWMPoolDesc wmPoolDesc) {
+    this.wmPoolDesc = wmPoolDesc;
+  }
+
+  public DropWMPoolDesc getDropWMPoolDesc() {
+    return dropWMPoolDesc;
+  }
+
+  public void setDropWMPoolDesc(DropWMPoolDesc dropWMPoolDesc) {
+    this.dropWMPoolDesc = dropWMPoolDesc;
+  }
+
+  public CreateOrAlterWMMappingDesc getWmMappingDesc() {
+    return wmMappingDesc;
+  }
+
+  public void setWmMappingDesc(CreateOrAlterWMMappingDesc wmMappingDesc) {
+    this.wmMappingDesc = wmMappingDesc;
+  }
+
+  public DropWMMappingDesc getDropWMMappingDesc() {
+    return dropWMMappingDesc;
+  }
+
+  public void setDropWMMappingDesc(DropWMMappingDesc dropWMMappingDesc) {
+    this.dropWMMappingDesc = dropWMMappingDesc;
+  }
+
+  public CreateOrDropTriggerToPoolMappingDesc getTriggerToPoolMappingDesc() {
+    return triggerToPoolMappingDesc;
+  }
+
+  public void setTriggerToPoolMappingDesc(CreateOrDropTriggerToPoolMappingDesc triggerToPoolMappingDesc) {
+    this.triggerToPoolMappingDesc = triggerToPoolMappingDesc;
+  }
 }
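
Each new desc gets a matching DDLWork constructor so the semantic analyzer can hand it to the DDL task. A minimal sketch, assuming ReadEntity/WriteEntity from org.apache.hadoop.hive.ql.hooks; illustrative only:

    // Sketch: wiring a DROP POOL statement into the DDL task
    static DDLWork dropPoolWork() {
      HashSet<ReadEntity> inputs = new HashSet<>();    // org.apache.hadoop.hive.ql.hooks.ReadEntity
      HashSet<WriteEntity> outputs = new HashSet<>();  // org.apache.hadoop.hive.ql.hooks.WriteEntity
      return new DDLWork(inputs, outputs, new DropWMPoolDesc("plan_2", "def.c1"));
    }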

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/DropWMMappingDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropWMMappingDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropWMMappingDesc.java
new file mode 100644
index 0000000..56b81ca
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropWMMappingDesc.java
@@ -0,0 +1,29 @@
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+import org.apache.hadoop.hive.metastore.api.WMMapping;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+@Explain(displayName = "Drop resource plan",
+    explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class DropWMMappingDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = -1567558687529244218L;
+
+  private WMMapping mapping;
+
+  public DropWMMappingDesc() {}
+
+  public DropWMMappingDesc(WMMapping mapping) {
+    this.mapping = mapping;
+  }
+
+  @Explain(displayName = "mapping", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public WMMapping getMapping() {
+    return mapping;
+  }
+
+  public void setMapping(WMMapping mapping) {
+    this.mapping = mapping;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/DropWMPoolDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropWMPoolDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropWMPoolDesc.java
new file mode 100644
index 0000000..ff1bedd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropWMPoolDesc.java
@@ -0,0 +1,33 @@
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+public class DropWMPoolDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = -2608462103392563252L;
+
+  private String resourcePlanName;
+  private String poolPath;
+
+  public DropWMPoolDesc() {}
+
+  public DropWMPoolDesc(String resourcePlanName, String poolPath) {
+    this.resourcePlanName = resourcePlanName;
+    this.poolPath = poolPath;
+  }
+
+  public String getResourcePlanName() {
+    return resourcePlanName;
+  }
+
+  public void setResourcePlanName(String resourcePlanName) {
+    this.resourcePlanName = resourcePlanName;
+  }
+
+  public String getPoolPath() {
+    return poolPath;
+  }
+
+  public void setPoolPath(String poolPath) {
+    this.poolPath = poolPath;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index 3fb1c26..32a24e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -149,7 +149,14 @@ public enum HiveOperation {
   DROP_RESOURCEPLAN("DROP RESOURCEPLAN", null, null, false, false),
   CREATE_TRIGGER("CREATE TRIGGER", null, null, false, false),
   ALTER_TRIGGER("ALTER TRIGGER", null, null, false, false),
-  DROP_TRIGGER("DROP TRIGGER", null, null, false, false);
+  DROP_TRIGGER("DROP TRIGGER", null, null, false, false),
+  CREATE_POOL("CREATE POOL", null, null, false, false),
+  ALTER_POOL("ALTER POOL", null, null, false, false),
+  DROP_POOL("DROP POOL", null, null, false, false),
+  CREATE_MAPPING("CREATE MAPPING", null, null, false, false),
+  ALTER_MAPPING("ALTER MAPPING", null, null, false, false),
+  DROP_MAPPING("DROP MAPPING", null, null, false, false);
+
 
   private String operationName;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
index dac4471..d3aad3a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
@@ -141,6 +141,12 @@ public enum HiveOperationType {
   CREATE_TRIGGER,
   ALTER_TRIGGER,
   DROP_TRIGGER,
+  CREATE_POOL,
+  ALTER_POOL,
+  DROP_POOL,
+  CREATE_MAPPING,
+  ALTER_MAPPING,
+  DROP_MAPPING,
 
   // ==== Hive command operation types starts here ==== //
   SET,

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index b74b460..dc04b45 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -319,6 +319,12 @@ public class Operation2Privilege {
     adminPrivOps.add(HiveOperationType.CREATE_TRIGGER);
     adminPrivOps.add(HiveOperationType.ALTER_TRIGGER);
     adminPrivOps.add(HiveOperationType.DROP_TRIGGER);
+    adminPrivOps.add(HiveOperationType.CREATE_POOL);
+    adminPrivOps.add(HiveOperationType.ALTER_POOL);
+    adminPrivOps.add(HiveOperationType.DROP_POOL);
+    adminPrivOps.add(HiveOperationType.CREATE_MAPPING);
+    adminPrivOps.add(HiveOperationType.ALTER_MAPPING);
+    adminPrivOps.add(HiveOperationType.DROP_MAPPING);
 
     // operations require select priv
     op2Priv.put(HiveOperationType.SHOWCOLUMNS, PrivRequirement.newIOPrivRequirement
@@ -485,6 +491,12 @@ public class Operation2Privilege {
     op2Priv.put(HiveOperationType.CREATE_TRIGGER, PrivRequirement.newIOPrivRequirement(null, null));
     op2Priv.put(HiveOperationType.ALTER_TRIGGER, PrivRequirement.newIOPrivRequirement(null, null));
     op2Priv.put(HiveOperationType.DROP_TRIGGER, PrivRequirement.newIOPrivRequirement(null, null));
+    op2Priv.put(HiveOperationType.CREATE_POOL, PrivRequirement.newIOPrivRequirement(null, null));
+    op2Priv.put(HiveOperationType.ALTER_POOL, PrivRequirement.newIOPrivRequirement(null, null));
+    op2Priv.put(HiveOperationType.DROP_POOL, PrivRequirement.newIOPrivRequirement(null, null));
+    op2Priv.put(HiveOperationType.CREATE_MAPPING, PrivRequirement.newIOPrivRequirement(null, null));
+    op2Priv.put(HiveOperationType.ALTER_MAPPING, PrivRequirement.newIOPrivRequirement(null, null));
+    op2Priv.put(HiveOperationType.DROP_MAPPING, PrivRequirement.newIOPrivRequirement(null, null));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java
index 156da4c..4cb9172 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestWorkloadManager.java
@@ -136,7 +136,7 @@ public class TestWorkloadManager {
 
   public static WMMapping mapping(String type, String user, String pool, int ordering) {
     WMMapping mapping = new WMMapping("rp", type, user);
-    mapping.setPoolName(pool);
+    mapping.setPoolPath(pool);
     mapping.setOrdering(ordering);
     return mapping;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/test/queries/clientpositive/resourceplan.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/resourceplan.q b/ql/src/test/queries/clientpositive/resourceplan.q
index a094712..afb9ceb 100644
--- a/ql/src/test/queries/clientpositive/resourceplan.q
+++ b/ql/src/test/queries/clientpositive/resourceplan.q
@@ -25,11 +25,14 @@ SHOW RESOURCE PLAN plan_1;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
 -- Create and show plan_2.
-CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM 10;
+CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM=10;
 SHOW RESOURCE PLANS;
 SHOW RESOURCE PLAN plan_2;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
+-- Should fail cannot set pool in create.
+CREATE RESOURCE PLAN plan_3 WITH QUERY_PARALLELISM=5, DEFAULT POOL = `all`;
+
 --
 -- Rename resource plans.
 --
@@ -47,7 +50,7 @@ ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 20;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
 -- Will fail for now; there are no pools.
-ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30, DEFAULT POOL = 'default';
+ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30, DEFAULT POOL = default1;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
 --
@@ -109,6 +112,10 @@ DROP RESOURCE PLAN plan_2;
 DROP RESOURCE PLAN plan_3;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
+-- Use reserved keyword table as name.
+CREATE RESOURCE PLAN `table`;
+ALTER RESOURCE PLAN `table` SET QUERY_PARALLELISM = 1;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
 
 --
 -- Create trigger commands.
@@ -134,6 +141,16 @@ SELECT * FROM SYS.WM_TRIGGERS;
 -- No edit on active resource plan.
 CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ = 0m DO MOVE TO null_pool;
 
+-- Add trigger with reserved keywords.
+CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > 100K DO MOVE TO `table`;
+CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > 100K DO MOVE TO `default`;
+CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > 1M DO MOVE TO `default`;
+CREATE TRIGGER `table`.`trigger1` WHEN ELAPSED_TIME > 10 DO KILL;
+CREATE TRIGGER `table`.`trigger2` WHEN BYTES_READ > 100 DO KILL;
+SELECT * FROM SYS.WM_TRIGGERS;
+DROP TRIGGER `table`.`database`;
+SELECT * FROM SYS.WM_TRIGGERS;
+
 -- Cannot drop/change trigger from enabled plan.
 ALTER RESOURCE PLAN plan_1 ENABLE;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
@@ -150,3 +167,136 @@ ALTER TRIGGER plan_1.trigger_2 WHEN BYTES_READ = 1000K DO KILL;
 ALTER RESOURCE PLAN plan_2 DISABLE;
 CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ = 0 DO MOVE TO null_pool;
 SELECT * FROM SYS.WM_TRIGGERS;
+
+
+--
+-- Create pool command.
+--
+
+-- Cannot create pool in active plans.
+CREATE POOL plan_1.default WITH
+   ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default';
+
+CREATE POOL plan_2.default WITH
+   ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default';
+SELECT * FROM SYS.WM_POOLS;
+
+CREATE POOL plan_2.default.c1 WITH
+    ALLOC_FRACTION=0.3, QUERY_PARALLELISM=3, SCHEDULING_POLICY='priority';
+
+CREATE POOL plan_2.default.c2 WITH
+    QUERY_PARALLELISM=2, SCHEDULING_POLICY='fair', ALLOC_FRACTION=0.7;
+
+ALTER POOL plan_2.default.c2 SET ALLOC_FRACTION = 0.2;
+
+ALTER POOL plan_2.default SET path = def;
+SELECT * FROM SYS.WM_POOLS;
+
+DROP POOL plan_2.default;
+SELECT * FROM SYS.WM_POOLS;
+
+-- Create failed no parent pool found.
+CREATE POOL plan_2.child1.child2 WITH
+    QUERY_PARALLELISM=2, SCHEDULING_POLICY='fcfs', ALLOC_FRACTION=0.8;
+
+-- Create nested pools.
+CREATE POOL `table`.`table` WITH
+  SCHEDULING_POLICY='random', ALLOC_FRACTION=0.5, QUERY_PARALLELISM=1;
+
+CREATE POOL `table`.`table`.pool1 WITH
+  SCHEDULING_POLICY='priority', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.9;
+CREATE POOL `table`.`table`.pool1.child1 WITH
+  SCHEDULING_POLICY='random', QUERY_PARALLELISM=1, ALLOC_FRACTION=0.3;
+CREATE POOL `table`.`table`.pool1.child2 WITH
+  SCHEDULING_POLICY='fair', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.7;
+ALTER POOL `table`.`table` SET ALLOC_FRACTION=0.0;
+SELECT * FROM SYS.WM_POOLS;
+
+-- Rename with child pools and parent pool.
+ALTER POOL `table`.`table`.pool1 SET PATH = `table`.pool;
+SELECT * FROM SYS.WM_POOLS;
+
+-- Fails has child pools.
+DROP POOL `table`.`table`;
+SELECT * FROM SYS.WM_POOLS;
+
+-- Fails default is default pool :-).
+DROP POOL `table`.default;
+SELECT * FROM SYS.WM_POOLS;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- Changed default pool, now it should work.
+ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool;
+DROP POOL `table`.default;
+SELECT * FROM SYS.WM_POOLS;
+
+--
+-- Pool to trigger mappings.
+--
+
+-- Success.
+ALTER POOL plan_2.def.c1 ADD TRIGGER trigger_1;
+ALTER POOL plan_2.def.c2 ADD TRIGGER trigger_1;
+
+-- With keywords, hopefully nobody does this.
+ALTER POOL `table`.`table` ADD TRIGGER `table`;
+
+-- Test m:n mappings.
+ALTER POOL `table`.`table`.pool.child1 ADD TRIGGER `table`;
+ALTER POOL `table`.`table`.pool.child1 ADD TRIGGER `trigger1`;
+ALTER POOL `table`.`table`.pool.child2 ADD TRIGGER `trigger1`;
+ALTER POOL `table`.`table`.pool.child2 ADD TRIGGER `trigger2`;
+SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS;
+
+-- Failures.
+
+
+-- pool does not exist.
+ALTER POOL plan_2.default ADD TRIGGER trigger_1;
+
+-- Trigger does not exist.
+ALTER POOL plan_2.def ADD TRIGGER trigger_2;
+
+SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS;
+
+-- Drop success.
+ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_1;
+
+-- Drop fail, does not exist.
+ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_2;
+
+-- Drops related mappings too.
+DROP POOL `table`.`table`.pool.child1;
+DROP POOL `table`.`table`.pool.child2;
+
+SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS;
+
+
+--
+-- User and group mappings.
+--
+
+CREATE USER MAPPING "user1" IN plan_2 TO def;
+CREATE USER MAPPING 'user2' IN plan_2 TO def WITH ORDER 1;
+CREATE GROUP MAPPING "group1" IN plan_2 TO def.c1;
+CREATE GROUP MAPPING 'group2' IN plan_2 TO def.c2 WITH ORDER 1;
+SELECT * FROM SYS.WM_MAPPINGS;
+
+-- Drop pool failed, pool in use.
+DROP POOL plan_2.def.c1;
+
+DROP USER MAPPING "user2" in plan_2;
+DROP GROUP MAPPING "group2" in plan_2;
+SELECT * FROM SYS.WM_MAPPINGS;
+
+CREATE RESOURCE PLAN plan_4;
+
+ALTER RESOURCE PLAN plan_4 ENABLE ACTIVATE;
+
+DROP RESOURCE PLAN plan_2;
+
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+SELECT * FROM SYS.WM_POOLS;
+SELECT * FROM SYS.WM_TRIGGERS;
+SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS;
+SELECT * FROM SYS.WM_MAPPINGS;

http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/test/results/clientpositive/llap/resourceplan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/resourceplan.q.out b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
index fe4d77a..b6c2c79 100644
--- a/ql/src/test/results/clientpositive/llap/resourceplan.q.out
+++ b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
@@ -2211,6 +2211,154 @@ ON
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: SYS@WM_TRIGGERS
 POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `WM_POOLS` (
+  `RP_NAME` string,
+  `PATH` string,
+  `ALLOC_FRACTION` double,
+  `QUERY_PARALLELISM` int,
+  `SCHEDULING_POLICY` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  WM_POOL.PATH,
+  WM_POOL.ALLOC_FRACTION,
+  WM_POOL.QUERY_PARALLELISM,
+  WM_POOL.SCHEDULING_POLICY
+FROM
+  WM_POOL
+JOIN
+  WM_RESOURCEPLAN
+ON
+  WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@WM_POOLS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `WM_POOLS` (
+  `RP_NAME` string,
+  `PATH` string,
+  `ALLOC_FRACTION` double,
+  `QUERY_PARALLELISM` int,
+  `SCHEDULING_POLICY` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  WM_POOL.PATH,
+  WM_POOL.ALLOC_FRACTION,
+  WM_POOL.QUERY_PARALLELISM,
+  WM_POOL.SCHEDULING_POLICY
+FROM
+  WM_POOL
+JOIN
+  WM_RESOURCEPLAN
+ON
+  WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@WM_POOLS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` (
+  `RP_NAME` string,
+  `POOL_PATH` string,
+  `TRIGGER_NAME` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME RP_NAME,
+  WM_POOL.PATH POOL_PATH,
+  WM_TRIGGER.NAME TRIGGER_NAME
+FROM
+  WM_POOL_TO_TRIGGER
+JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID
+JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID
+JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@WM_POOLS_TO_TRIGGERS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` (
+  `RP_NAME` string,
+  `POOL_PATH` string,
+  `TRIGGER_NAME` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME RP_NAME,
+  WM_POOL.PATH POOL_PATH,
+  WM_TRIGGER.NAME TRIGGER_NAME
+FROM
+  WM_POOL_TO_TRIGGER
+JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID
+JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID
+JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@WM_POOLS_TO_TRIGGERS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `WM_MAPPINGS` (
+  `RP_NAME` string,
+  `ENTITY_TYPE` string,
+  `ENTITY_NAME` string,
+  `POOL_PATH` string,
+  `ORDERING` int
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  ENTITY_TYPE,
+  ENTITY_NAME,
+  WM_POOL.PATH,
+  ORDERING
+FROM
+  WM_MAPPING
+JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID
+LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@WM_MAPPINGS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `WM_MAPPINGS` (
+  `RP_NAME` string,
+  `ENTITY_TYPE` string,
+  `ENTITY_NAME` string,
+  `POOL_PATH` string,
+  `ORDERING` int
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  ENTITY_TYPE,
+  ENTITY_NAME,
+  WM_POOL.PATH,
+  ORDERING
+FROM
+  WM_MAPPING
+JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID
+LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@WM_MAPPINGS
+POSTHOOK: Output: database:sys
 PREHOOK: query: DROP DATABASE IF EXISTS INFORMATION_SCHEMA
 PREHOOK: type: DROPDATABASE
 POSTHOOK: query: DROP DATABASE IF EXISTS INFORMATION_SCHEMA
@@ -3000,9 +3148,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_1	DISABLED	NULL	default
-PREHOOK: query: CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM 10
+PREHOOK: query: CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM=10
 PREHOOK: type: CREATE RESOURCEPLAN
-POSTHOOK: query: CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM 10
+POSTHOOK: query: CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM=10
 POSTHOOK: type: CREATE RESOURCEPLAN
 PREHOOK: query: SHOW RESOURCE PLANS
 PREHOOK: type: SHOW RESOURCEPLAN
@@ -3025,6 +3173,7 @@ POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_1	DISABLED	NULL	default
 plan_2	DISABLED	10	default
+FAILED: SemanticException Invalid set in create resource plan: TOK_DEFAULT_POOL
 PREHOOK: query: ALTER RESOURCE PLAN plan_1 RENAME TO plan_2
 PREHOOK: type: ALTER RESOURCEPLAN
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. AlreadyExistsException(message:Resource plan name should be unique: )
@@ -3066,9 +3215,9 @@ POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_3	DISABLED	20	default
 plan_2	DISABLED	10	default
-PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30, DEFAULT POOL = 'default'
+PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30, DEFAULT POOL = default1
 PREHOOK: type: ALTER RESOURCEPLAN
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Portion of expression could not be parsed: and resourcePlan == rp)
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. NoSuchObjectException(message:Cannot find pool: default1)
 PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@wm_resourceplans
@@ -3246,6 +3395,24 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2	ACTIVE	10	default
+PREHOOK: query: CREATE RESOURCE PLAN `table`
+PREHOOK: type: CREATE RESOURCEPLAN
+POSTHOOK: query: CREATE RESOURCE PLAN `table`
+POSTHOOK: type: CREATE RESOURCEPLAN
+PREHOOK: query: ALTER RESOURCE PLAN `table` SET QUERY_PARALLELISM = 1
+PREHOOK: type: ALTER RESOURCEPLAN
+POSTHOOK: query: ALTER RESOURCE PLAN `table` SET QUERY_PARALLELISM = 1
+POSTHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+plan_2	ACTIVE	10	default
+table	DISABLED	1	default
 PREHOOK: query: CREATE RESOURCE PLAN plan_1
 PREHOOK: type: CREATE RESOURCEPLAN
 POSTHOOK: query: CREATE RESOURCE PLAN plan_1
@@ -3310,6 +3477,57 @@ plan_1	trigger_2	BYTES_READ > 100	MOVE TO slow_pool
 PREHOOK: query: CREATE TRIGGER plan_2.trigger_1 WHEN BYTES_READ = 0m DO MOVE TO null_pool
 PREHOOK: type: CREATE TRIGGER
 FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.)
+PREHOOK: query: CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > 100K DO MOVE TO `table`
+PREHOOK: type: CREATE TRIGGER
+POSTHOOK: query: CREATE TRIGGER `table`.`table` WHEN BYTES_WRITTEN > 100K DO MOVE TO `table`
+POSTHOOK: type: CREATE TRIGGER
+PREHOOK: query: CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > 100K DO MOVE TO `default`
+PREHOOK: type: CREATE TRIGGER
+POSTHOOK: query: CREATE TRIGGER `table`.`trigger` WHEN BYTES_WRITTEN > 100K DO MOVE TO `default`
+POSTHOOK: type: CREATE TRIGGER
+PREHOOK: query: CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > 1M DO MOVE TO `default`
+PREHOOK: type: CREATE TRIGGER
+POSTHOOK: query: CREATE TRIGGER `table`.`database` WHEN BYTES_WRITTEN > 1M DO MOVE TO `default`
+POSTHOOK: type: CREATE TRIGGER
+PREHOOK: query: CREATE TRIGGER `table`.`trigger1` WHEN ELAPSED_TIME > 10 DO KILL
+PREHOOK: type: CREATE TRIGGER
+POSTHOOK: query: CREATE TRIGGER `table`.`trigger1` WHEN ELAPSED_TIME > 10 DO KILL
+POSTHOOK: type: CREATE TRIGGER
+PREHOOK: query: CREATE TRIGGER `table`.`trigger2` WHEN BYTES_READ > 100 DO KILL
+PREHOOK: type: CREATE TRIGGER
+POSTHOOK: query: CREATE TRIGGER `table`.`trigger2` WHEN BYTES_READ > 100 DO KILL
+POSTHOOK: type: CREATE TRIGGER
+PREHOOK: query: SELECT * FROM SYS.WM_TRIGGERS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_triggers
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_TRIGGERS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_triggers
+#### A masked pattern was here ####
+plan_1	trigger_2	BYTES_READ > 100	MOVE TO slow_pool
+table	table	BYTES_WRITTEN > 100K	MOVE TO table
+table	trigger	BYTES_WRITTEN > 100K	MOVE TO default
+table	database	BYTES_WRITTEN > 1M	MOVE TO default
+table	trigger1	ELAPSED_TIME > 10	KILL
+table	trigger2	BYTES_READ > 100	KILL
+PREHOOK: query: DROP TRIGGER `table`.`database`
+PREHOOK: type: DROP TRIGGER
+POSTHOOK: query: DROP TRIGGER `table`.`database`
+POSTHOOK: type: DROP TRIGGER
+PREHOOK: query: SELECT * FROM SYS.WM_TRIGGERS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_triggers
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_TRIGGERS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_triggers
+#### A masked pattern was here ####
+plan_1	trigger_2	BYTES_READ > 100	MOVE TO slow_pool
+table	table	BYTES_WRITTEN > 100K	MOVE TO table
+table	trigger	BYTES_WRITTEN > 100K	MOVE TO default
+table	trigger1	ELAPSED_TIME > 10	KILL
+table	trigger2	BYTES_READ > 100	KILL
 PREHOOK: query: ALTER RESOURCE PLAN plan_1 ENABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 POSTHOOK: query: ALTER RESOURCE PLAN plan_1 ENABLE
@@ -3323,6 +3541,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2	ACTIVE	10	default
+table	DISABLED	1	default
 plan_1	ENABLED	NULL	default
 PREHOOK: query: DROP TRIGGER plan_1.trigger_2
 PREHOOK: type: DROP TRIGGER
@@ -3343,6 +3562,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2	ENABLED	10	default
+table	DISABLED	1	default
 plan_1	ACTIVE	NULL	default
 PREHOOK: query: DROP TRIGGER plan_1.trigger_2
 PREHOOK: type: DROP TRIGGER
@@ -3366,5 +3586,425 @@ POSTHOOK: query: SELECT * FROM SYS.WM_TRIGGERS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_triggers
 #### A masked pattern was here ####
+plan_1	trigger_2	BYTES_READ > 100	MOVE TO slow_pool
+table	table	BYTES_WRITTEN > 100K	MOVE TO table
+table	trigger	BYTES_WRITTEN > 100K	MOVE TO default
+table	trigger1	ELAPSED_TIME > 10	KILL
+table	trigger2	BYTES_READ > 100	KILL
 plan_2	trigger_1	BYTES_READ = 0	MOVE TO null_pool
+PREHOOK: query: CREATE POOL plan_1.default WITH
+   ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default'
+PREHOOK: type: CREATE POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Resource plan must be disabled to edit it.)
+PREHOOK: query: CREATE POOL plan_2.default WITH
+   ALLOC_FRACTION=1.0, QUERY_PARALLELISM=5, SCHEDULING_POLICY='default'
+PREHOOK: type: CREATE POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. AlreadyExistsException(message:Pool already exists: )
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+plan_2	default	1.0	4	NULL
+table	default	1.0	4	NULL
+plan_1	default	1.0	4	NULL
+PREHOOK: query: CREATE POOL plan_2.default.c1 WITH
+    ALLOC_FRACTION=0.3, QUERY_PARALLELISM=3, SCHEDULING_POLICY='priority'
+PREHOOK: type: CREATE POOL
+POSTHOOK: query: CREATE POOL plan_2.default.c1 WITH
+    ALLOC_FRACTION=0.3, QUERY_PARALLELISM=3, SCHEDULING_POLICY='priority'
+POSTHOOK: type: CREATE POOL
+PREHOOK: query: CREATE POOL plan_2.default.c2 WITH
+    QUERY_PARALLELISM=2, SCHEDULING_POLICY='fair', ALLOC_FRACTION=0.7
+PREHOOK: type: CREATE POOL
+POSTHOOK: query: CREATE POOL plan_2.default.c2 WITH
+    QUERY_PARALLELISM=2, SCHEDULING_POLICY='fair', ALLOC_FRACTION=0.7
+POSTHOOK: type: CREATE POOL
+PREHOOK: query: ALTER POOL plan_2.default.c2 SET ALLOC_FRACTION = 0.2
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL plan_2.default.c2 SET ALLOC_FRACTION = 0.2
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: ALTER POOL plan_2.default SET path = def
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL plan_2.default SET path = def
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+plan_2	def	1.0	4	NULL
+table	default	1.0	4	NULL
+plan_1	default	1.0	4	NULL
+plan_2	def.c1	0.3	3	priority
+plan_2	def.c2	0.2	2	fair
+PREHOOK: query: DROP POOL plan_2.default
+PREHOOK: type: DROP POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. NoSuchObjectException(message:Cannot delete pool: default)
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+plan_2	def	1.0	4	NULL
+table	default	1.0	4	NULL
+plan_1	default	1.0	4	NULL
+plan_2	def.c1	0.3	3	priority
+plan_2	def.c2	0.2	2	fair
+PREHOOK: query: CREATE POOL plan_2.child1.child2 WITH
+    QUERY_PARALLELISM=2, SCHEDULING_POLICY='fcfs', ALLOC_FRACTION=0.8
+PREHOOK: type: CREATE POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. NoSuchObjectException(message:Pool path is invalid, the parent does not exist)
+PREHOOK: query: CREATE POOL `table`.`table` WITH
+  SCHEDULING_POLICY='random', ALLOC_FRACTION=0.5, QUERY_PARALLELISM=1
+PREHOOK: type: CREATE POOL
+POSTHOOK: query: CREATE POOL `table`.`table` WITH
+  SCHEDULING_POLICY='random', ALLOC_FRACTION=0.5, QUERY_PARALLELISM=1
+POSTHOOK: type: CREATE POOL
+PREHOOK: query: CREATE POOL `table`.`table`.pool1 WITH
+  SCHEDULING_POLICY='priority', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.9
+PREHOOK: type: CREATE POOL
+POSTHOOK: query: CREATE POOL `table`.`table`.pool1 WITH
+  SCHEDULING_POLICY='priority', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.9
+POSTHOOK: type: CREATE POOL
+PREHOOK: query: CREATE POOL `table`.`table`.pool1.child1 WITH
+  SCHEDULING_POLICY='random', QUERY_PARALLELISM=1, ALLOC_FRACTION=0.3
+PREHOOK: type: CREATE POOL
+POSTHOOK: query: CREATE POOL `table`.`table`.pool1.child1 WITH
+  SCHEDULING_POLICY='random', QUERY_PARALLELISM=1, ALLOC_FRACTION=0.3
+POSTHOOK: type: CREATE POOL
+PREHOOK: query: CREATE POOL `table`.`table`.pool1.child2 WITH
+  SCHEDULING_POLICY='fair', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.7
+PREHOOK: type: CREATE POOL
+POSTHOOK: query: CREATE POOL `table`.`table`.pool1.child2 WITH
+  SCHEDULING_POLICY='fair', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.7
+POSTHOOK: type: CREATE POOL
+PREHOOK: query: ALTER POOL `table`.`table` SET ALLOC_FRACTION=0.0
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL `table`.`table` SET ALLOC_FRACTION=0.0
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+plan_2	def	1.0	4	NULL
+table	default	1.0	4	NULL
+plan_1	default	1.0	4	NULL
+plan_2	def.c1	0.3	3	priority
+plan_2	def.c2	0.2	2	fair
+table	table	0.0	1	random
+table	table.pool1	0.9	3	priority
+table	table.pool1.child1	0.3	1	random
+table	table.pool1.child2	0.7	3	fair
+PREHOOK: query: ALTER POOL `table`.`table`.pool1 SET PATH = `table`.pool
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL `table`.`table`.pool1 SET PATH = `table`.pool
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+plan_2	def	1.0	4	NULL
+table	default	1.0	4	NULL
+plan_1	default	1.0	4	NULL
+plan_2	def.c1	0.3	3	priority
+plan_2	def.c2	0.2	2	fair
+table	table	0.0	1	random
+table	table.pool	0.9	3	priority
+table	table.pool.child1	0.3	1	random
+table	table.pool.child2	0.7	3	fair
+PREHOOK: query: DROP POOL `table`.`table`
+PREHOOK: type: DROP POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Pool has children cannot drop.)
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+plan_2	def	1.0	4	NULL
+table	default	1.0	4	NULL
+plan_1	default	1.0	4	NULL
+plan_2	def.c1	0.3	3	priority
+plan_2	def.c2	0.2	2	fair
+table	table	0.0	1	random
+table	table.pool	0.9	3	priority
+table	table.pool.child1	0.3	1	random
+table	table.pool.child2	0.7	3	fair
+PREHOOK: query: DROP POOL `table`.default
+PREHOOK: type: DROP POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Cannot drop default pool of a resource plan)
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+plan_2	def	1.0	4	NULL
+table	default	1.0	4	NULL
+plan_1	default	1.0	4	NULL
+plan_2	def.c1	0.3	3	priority
+plan_2	def.c2	0.2	2	fair
+table	table	0.0	1	random
+table	table.pool	0.9	3	priority
+table	table.pool.child1	0.3	1	random
+table	table.pool.child2	0.7	3	fair
+PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+plan_2	DISABLED	10	def
+table	DISABLED	1	default
+plan_1	ACTIVE	NULL	default
+PREHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool
+PREHOOK: type: ALTER RESOURCEPLAN
+POSTHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool
+POSTHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: query: DROP POOL `table`.default
+PREHOOK: type: DROP POOL
+POSTHOOK: query: DROP POOL `table`.default
+POSTHOOK: type: DROP POOL
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+plan_2	def	1.0	4	NULL
+plan_1	default	1.0	4	NULL
+plan_2	def.c1	0.3	3	priority
+plan_2	def.c2	0.2	2	fair
+table	table	0.0	1	random
+table	table.pool	0.9	3	priority
+table	table.pool.child1	0.3	1	random
+table	table.pool.child2	0.7	3	fair
+PREHOOK: query: ALTER POOL plan_2.def.c1 ADD TRIGGER trigger_1
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL plan_2.def.c1 ADD TRIGGER trigger_1
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: ALTER POOL plan_2.def.c2 ADD TRIGGER trigger_1
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL plan_2.def.c2 ADD TRIGGER trigger_1
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: ALTER POOL `table`.`table` ADD TRIGGER `table`
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL `table`.`table` ADD TRIGGER `table`
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: ALTER POOL `table`.`table`.pool.child1 ADD TRIGGER `table`
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL `table`.`table`.pool.child1 ADD TRIGGER `table`
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: ALTER POOL `table`.`table`.pool.child1 ADD TRIGGER `trigger1`
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL `table`.`table`.pool.child1 ADD TRIGGER `trigger1`
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: ALTER POOL `table`.`table`.pool.child2 ADD TRIGGER `trigger1`
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL `table`.`table`.pool.child2 ADD TRIGGER `trigger1`
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: ALTER POOL `table`.`table`.pool.child2 ADD TRIGGER `trigger2`
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL `table`.`table`.pool.child2 ADD TRIGGER `trigger2`
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools_to_triggers
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools_to_triggers
+#### A masked pattern was here ####
+table	table	table
+table	table.pool.child1	table
+table	table.pool.child1	trigger1
+table	table.pool.child2	trigger1
+table	table.pool.child2	trigger2
+plan_2	def.c1	trigger_1
+plan_2	def.c2	trigger_1
+PREHOOK: query: ALTER POOL plan_2.default ADD TRIGGER trigger_1
+PREHOOK: type: ALTER POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. NoSuchObjectException(message:Cannot find pool: default)
+PREHOOK: query: ALTER POOL plan_2.def ADD TRIGGER trigger_2
+PREHOOK: type: ALTER POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. NoSuchObjectException(message:Cannot find trigger with name: trigger_2)
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools_to_triggers
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools_to_triggers
+#### A masked pattern was here ####
+table	table	table
+table	table.pool.child1	table
+table	table.pool.child1	trigger1
+table	table.pool.child2	trigger1
+table	table.pool.child2	trigger2
+plan_2	def.c1	trigger_1
+plan_2	def.c2	trigger_1
+PREHOOK: query: ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_1
+PREHOOK: type: ALTER POOL
+POSTHOOK: query: ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_1
+POSTHOOK: type: ALTER POOL
+PREHOOK: query: ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_2
+PREHOOK: type: ALTER POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. NoSuchObjectException(message:Cannot find trigger with name: trigger_2)
+PREHOOK: query: DROP POOL `table`.`table`.pool.child1
+PREHOOK: type: DROP POOL
+POSTHOOK: query: DROP POOL `table`.`table`.pool.child1
+POSTHOOK: type: DROP POOL
+PREHOOK: query: DROP POOL `table`.`table`.pool.child2
+PREHOOK: type: DROP POOL
+POSTHOOK: query: DROP POOL `table`.`table`.pool.child2
+POSTHOOK: type: DROP POOL
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools_to_triggers
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools_to_triggers
+#### A masked pattern was here ####
+table	table	table
+plan_2	def.c2	trigger_1
+PREHOOK: query: CREATE USER MAPPING "user1" IN plan_2 TO def
+PREHOOK: type: CREATE MAPPING
+POSTHOOK: query: CREATE USER MAPPING "user1" IN plan_2 TO def
+POSTHOOK: type: CREATE MAPPING
+PREHOOK: query: CREATE USER MAPPING 'user2' IN plan_2 TO def WITH ORDER 1
+PREHOOK: type: CREATE MAPPING
+POSTHOOK: query: CREATE USER MAPPING 'user2' IN plan_2 TO def WITH ORDER 1
+POSTHOOK: type: CREATE MAPPING
+PREHOOK: query: CREATE GROUP MAPPING "group1" IN plan_2 TO def.c1
+PREHOOK: type: CREATE MAPPING
+POSTHOOK: query: CREATE GROUP MAPPING "group1" IN plan_2 TO def.c1
+POSTHOOK: type: CREATE MAPPING
+PREHOOK: query: CREATE GROUP MAPPING 'group2' IN plan_2 TO def.c2 WITH ORDER 1
+PREHOOK: type: CREATE MAPPING
+POSTHOOK: query: CREATE GROUP MAPPING 'group2' IN plan_2 TO def.c2 WITH ORDER 1
+POSTHOOK: type: CREATE MAPPING
+PREHOOK: query: SELECT * FROM SYS.WM_MAPPINGS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_mappings
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_MAPPINGS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_mappings
+#### A masked pattern was here ####
+plan_2	USER	user1	def	0
+plan_2	USER	user2	def	1
+plan_2	GROUP	group1	def.c1	0
+plan_2	GROUP	group2	def.c2	1
+PREHOOK: query: DROP POOL plan_2.def.c1
+PREHOOK: type: DROP POOL
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:Exception thrown flushing changes to datastore)
+PREHOOK: query: DROP USER MAPPING "user2" in plan_2
+PREHOOK: type: DROP MAPPING
+POSTHOOK: query: DROP USER MAPPING "user2" in plan_2
+POSTHOOK: type: DROP MAPPING
+PREHOOK: query: DROP GROUP MAPPING "group2" in plan_2
+PREHOOK: type: DROP MAPPING
+POSTHOOK: query: DROP GROUP MAPPING "group2" in plan_2
+POSTHOOK: type: DROP MAPPING
+PREHOOK: query: SELECT * FROM SYS.WM_MAPPINGS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_mappings
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_MAPPINGS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_mappings
+#### A masked pattern was here ####
+plan_2	USER	user1	def	0
+plan_2	GROUP	group1	def.c1	0
+PREHOOK: query: CREATE RESOURCE PLAN plan_4
+PREHOOK: type: CREATE RESOURCEPLAN
+POSTHOOK: query: CREATE RESOURCE PLAN plan_4
+POSTHOOK: type: CREATE RESOURCEPLAN
+PREHOOK: query: ALTER RESOURCE PLAN plan_4 ENABLE ACTIVATE
+PREHOOK: type: ALTER RESOURCEPLAN
+POSTHOOK: query: ALTER RESOURCE PLAN plan_4 ENABLE ACTIVATE
+POSTHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: query: DROP RESOURCE PLAN plan_2
+PREHOOK: type: DROP RESOURCEPLAN
+POSTHOOK: query: DROP RESOURCE PLAN plan_2
+POSTHOOK: type: DROP RESOURCEPLAN
+PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_resourceplans
+#### A masked pattern was here ####
+table	DISABLED	1	table.pool
+plan_1	ENABLED	NULL	default
+plan_4	ACTIVE	NULL	default
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools
+#### A masked pattern was here ####
+table	table	0.0	1	random
+table	table.pool	0.9	3	priority
+plan_1	default	1.0	4	NULL
+plan_4	default	1.0	4	NULL
+PREHOOK: query: SELECT * FROM SYS.WM_TRIGGERS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_triggers
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_TRIGGERS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_triggers
+#### A masked pattern was here ####
 plan_1	trigger_2	BYTES_READ > 100	MOVE TO slow_pool
+table	table	BYTES_WRITTEN > 100K	MOVE TO table
+table	trigger	BYTES_WRITTEN > 100K	MOVE TO default
+table	trigger1	ELAPSED_TIME > 10	KILL
+table	trigger2	BYTES_READ > 100	KILL
+PREHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_pools_to_triggers
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_pools_to_triggers
+#### A masked pattern was here ####
+table	table	table
+PREHOOK: query: SELECT * FROM SYS.WM_MAPPINGS
+PREHOOK: type: QUERY
+PREHOOK: Input: sys@wm_mappings
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM SYS.WM_MAPPINGS
+POSTHOOK: type: QUERY
+POSTHOOK: Input: sys@wm_mappings
+#### A masked pattern was here ####

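A compact recap of the pool-management DDL exercised in the resourceplan.q golden output above; every statement is copied from that output (pool, plan, trigger, and mapping names are the test's own), with semicolons added so the sketch reads as a standalone script rather than as part of the patch:

-- Pools form a dotted hierarchy under a resource plan; a parent pool must exist first.
CREATE POOL `table`.`table`.pool1 WITH
  SCHEDULING_POLICY='priority', QUERY_PARALLELISM=3, ALLOC_FRACTION=0.9;

-- A pool can be re-pathed, and triggers are attached to or detached from individual pools.
ALTER POOL `table`.`table`.pool1 SET PATH = `table`.pool;
ALTER POOL plan_2.def.c1 ADD TRIGGER trigger_1;
ALTER POOL plan_2.def.c1 DROP TRIGGER trigger_1;

-- User and group mappings route sessions to pools, with an optional ordering value.
CREATE USER MAPPING 'user2' IN plan_2 TO def WITH ORDER 1;
DROP USER MAPPING "user2" IN plan_2;

-- The SYS tables added by this patch expose the resulting pool/trigger/mapping state.
SELECT * FROM SYS.WM_POOLS;
SELECT * FROM SYS.WM_POOLS_TO_TRIGGERS;
SELECT * FROM SYS.WM_MAPPINGS;
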
http://git-wip-us.apache.org/repos/asf/hive/blob/44ef5991/ql/src/test/results/clientpositive/llap/sysdb.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/sysdb.q.out b/ql/src/test/results/clientpositive/llap/sysdb.q.out
index d8ded1d..9681b6f 100644
--- a/ql/src/test/results/clientpositive/llap/sysdb.q.out
+++ b/ql/src/test/results/clientpositive/llap/sysdb.q.out
@@ -2279,6 +2279,154 @@ ON
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: SYS@WM_TRIGGERS
 POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `WM_POOLS` (
+  `RP_NAME` string,
+  `PATH` string,
+  `ALLOC_FRACTION` double,
+  `QUERY_PARALLELISM` int,
+  `SCHEDULING_POLICY` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  WM_POOL.PATH,
+  WM_POOL.ALLOC_FRACTION,
+  WM_POOL.QUERY_PARALLELISM,
+  WM_POOL.SCHEDULING_POLICY
+FROM
+  WM_POOL
+JOIN
+  WM_RESOURCEPLAN
+ON
+  WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@WM_POOLS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `WM_POOLS` (
+  `RP_NAME` string,
+  `PATH` string,
+  `ALLOC_FRACTION` double,
+  `QUERY_PARALLELISM` int,
+  `SCHEDULING_POLICY` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  WM_POOL.PATH,
+  WM_POOL.ALLOC_FRACTION,
+  WM_POOL.QUERY_PARALLELISM,
+  WM_POOL.SCHEDULING_POLICY
+FROM
+  WM_POOL
+JOIN
+  WM_RESOURCEPLAN
+ON
+  WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@WM_POOLS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` (
+  `RP_NAME` string,
+  `POOL_PATH` string,
+  `TRIGGER_NAME` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME RP_NAME,
+  WM_POOL.PATH POOL_PATH,
+  WM_TRIGGER.NAME TRIGGER_NAME
+FROM
+  WM_POOL_TO_TRIGGER
+JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID
+JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID
+JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@WM_POOLS_TO_TRIGGERS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` (
+  `RP_NAME` string,
+  `POOL_PATH` string,
+  `TRIGGER_NAME` string
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME RP_NAME,
+  WM_POOL.PATH POOL_PATH,
+  WM_TRIGGER.NAME TRIGGER_NAME
+FROM
+  WM_POOL_TO_TRIGGER
+JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID
+JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID
+JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@WM_POOLS_TO_TRIGGERS
+POSTHOOK: Output: database:sys
+PREHOOK: query: CREATE TABLE IF NOT EXISTS `WM_MAPPINGS` (
+  `RP_NAME` string,
+  `ENTITY_TYPE` string,
+  `ENTITY_NAME` string,
+  `POOL_PATH` string,
+  `ORDERING` int
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  ENTITY_TYPE,
+  ENTITY_NAME,
+  WM_POOL.PATH,
+  ORDERING
+FROM
+  WM_MAPPING
+JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID
+LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: SYS@WM_MAPPINGS
+PREHOOK: Output: database:sys
+POSTHOOK: query: CREATE TABLE IF NOT EXISTS `WM_MAPPINGS` (
+  `RP_NAME` string,
+  `ENTITY_TYPE` string,
+  `ENTITY_NAME` string,
+  `POOL_PATH` string,
+  `ORDERING` int
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "METASTORE",
+"hive.sql.query" =
+"SELECT
+  WM_RESOURCEPLAN.NAME,
+  ENTITY_TYPE,
+  ENTITY_NAME,
+  WM_POOL.PATH,
+  ORDERING
+FROM
+  WM_MAPPING
+JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID
+LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: SYS@WM_MAPPINGS
+POSTHOOK: Output: database:sys
 PREHOOK: query: DROP DATABASE IF EXISTS INFORMATION_SCHEMA
 PREHOOK: type: DROPDATABASE
 POSTHOOK: query: DROP DATABASE IF EXISTS INFORMATION_SCHEMA
@@ -3059,7 +3207,7 @@ POSTHOOK: query: select count(*) from cds
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@cds
 #### A masked pattern was here ####
-67
+70
 PREHOOK: query: select column_name, type_name, integer_idx from columns_v2 order by column_name, integer_idx limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@columns_v2
@@ -3072,7 +3220,7 @@ a	decimal(10,2)	0
 action_expression	string	3
 add_time	int	1
 aint	int	0
-astring	string	1
+alloc_fraction	double	2
 PREHOOK: query: select param_key, param_value from database_params order by param_key, param_value limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@database_params
@@ -3213,7 +3361,7 @@ POSTHOOK: query: select count(*) from sds
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@sds
 #### A masked pattern was here ####
-73
+76
 PREHOOK: query: select param_key, param_value from sd_params order by param_key, param_value limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@sd_params
@@ -3325,8 +3473,8 @@ POSTHOOK: Input: sys@table_params
 COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true","COLUMN_STATS":{"a":"true","b":"true","c":"true","d":"true","e":"true","f":"true","g":"true"}}
 COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true","COLUMN_STATS":{"action_expression":"true","name":"true","rp_name":"true","trigger_expression":"true"}}
 COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true","COLUMN_STATS":{"add_time":"true","grant_option":"true","grantor":"true","grantor_type":"true","principal_name":"true","principal_type":"true","role_grant_id":"true","role_id":"true"}}
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true","COLUMN_STATS":{"alloc_fraction":"true","path":"true","query_parallelism":"true","rp_name":"true","scheduling_policy":"true"}}
 COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true","COLUMN_STATS":{"avg_col_len":"true","big_decimal_high_value":"true","big_decimal_low_value":"true","column_name":"true","column_type":"true","cs_id":"true","db_name":"true","double_high_value":"true","double_low_value":"true","last_analyzed":"true","long_high_value":"true","long_low_value":"true","max_col_len":"true","num_distincts":"true","num_falses":"true","num_nulls":"true","num_trues":"true","part_id":"true","partition_name":"true","table_name":"true"}}
-COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true","COLUMN_STATS":{"avg_col_len":"true","big_decimal_high_value":"true","big_decimal_low_value":"true","column_name":"true","column_type":"true","cs_id":"true","db_name":"true","double_high_value":"true","double_low_value":"true","last_analyzed":"true","long_high_value":"true","long_low_value":"true","max_col_len":"true","num_distincts":"true","num_falses":"true","num_nulls":"true","num_trues":"true","table_name":"true","tbl_id":"true"}}
 PREHOOK: query: select tbl_name from tbls order by tbl_name limit 5
 PREHOOK: type: QUERY
 PREHOOK: Input: sys@tbls
@@ -3439,7 +3587,7 @@ POSTHOOK: Input: sys@table_params
 POSTHOOK: Input: sys@table_stats_view
 #### A masked pattern was here ####
 {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}	0	0	0	0
-{"BASIC_STATS":"true","COLUMN_STATS":{"action_expression":"true","name":"true","rp_name":"true","trigger_expression":"true"}}	0	0	0	0
+{"BASIC_STATS":"true","COLUMN_STATS":{"entity_name":"true","entity_type":"true","ordering":"true","pool_path":"true","rp_name":"true"}}	0	0	0	0
 {"BASIC_STATS":"true","COLUMN_STATS":{"next_val":"true","sequence_name":"true"}}	0	0	0	0
 {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}	0	0	0	0
 #### A masked pattern was here ####
@@ -3644,6 +3792,9 @@ default	sys	tbl_col_privs	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
 default	sys	tbl_privs	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
 default	sys	tbls	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
 default	sys	version	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
+default	sys	wm_mappings	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
+default	sys	wm_pools	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
+default	sys	wm_pools_to_triggers	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
 default	sys	wm_resourceplans	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
 default	sys	wm_triggers	BASE_TABLE	NULL	NULL	NULL	NULL	NULL	YES	NO	NULL
 PREHOOK: query: select * from TABLE_PRIVILEGES order by GRANTOR, GRANTEE, TABLE_SCHEMA, TABLE_NAME, PRIVILEGE_TYPE limit 10