You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by se...@apache.org on 2018/01/06 00:19:22 UTC
hive git commit: HIVE-18096 : add a user-friendly show plan command
(Harish Jaiprakash, reviewed by Sergey Shelukhin)
Repository: hive
Updated Branches:
refs/heads/master a6435cca3 -> a6b88d9d2
HIVE-18096 : add a user-friendly show plan command (Harish Jaiprakash, reviewed by Sergey Shelukhin)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a6b88d9d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a6b88d9d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a6b88d9d
Branch: refs/heads/master
Commit: a6b88d9d2ff74a3767fe316e72d1dd3f69fc6b4a
Parents: a6435cc
Author: sergey <se...@apache.org>
Authored: Fri Jan 5 16:14:36 2018 -0800
Committer: sergey <se...@apache.org>
Committed: Fri Jan 5 16:14:36 2018 -0800
----------------------------------------------------------------------
.../listener/DummyRawStoreFailEvent.java | 2 +-
.../org/apache/hadoop/hive/ql/exec/DDLTask.java | 6 +-
.../apache/hadoop/hive/ql/metadata/Hive.java | 2 +-
.../formatting/JsonMetaDataFormatter.java | 97 ++++++++++++
.../formatting/MetaDataFormatUtils.java | 128 +++++++++++++++
.../metadata/formatting/MetaDataFormatter.java | 7 +-
.../formatting/TextMetaDataFormatter.java | 93 ++++++++++-
.../hive/ql/parse/DDLSemanticAnalyzer.java | 4 +-
.../hive/ql/plan/ShowResourcePlanDesc.java | 17 +-
.../formatting/TestJsonRPFormatter.java | 156 +++++++++++++++++++
.../test/queries/clientpositive/resourceplan.q | 2 +
.../clientpositive/llap/resourceplan.q.out | 15 +-
.../gen/thrift/gen-cpp/hive_metastore_types.cpp | 2 +-
.../gen/thrift/gen-cpp/hive_metastore_types.h | 4 +-
.../api/WMGetResourcePlanResponse.java | 16 +-
.../src/gen/thrift/gen-php/metastore/Types.php | 6 +-
.../gen/thrift/gen-py/hive_metastore/ttypes.py | 4 +-
.../gen/thrift/gen-rb/hive_metastore_types.rb | 2 +-
.../hadoop/hive/metastore/HiveMetaStore.java | 2 +-
.../hive/metastore/HiveMetaStoreClient.java | 2 +-
.../hadoop/hive/metastore/IMetaStoreClient.java | 2 +-
.../hadoop/hive/metastore/ObjectStore.java | 11 +-
.../apache/hadoop/hive/metastore/RawStore.java | 2 +-
.../hive/metastore/cache/CachedStore.java | 3 +-
.../src/main/thrift/hive_metastore.thrift | 2 +-
.../DummyRawStoreControlledCommit.java | 2 +-
.../DummyRawStoreForJdoConnection.java | 2 +-
27 files changed, 537 insertions(+), 54 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
----------------------------------------------------------------------
diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
index 6dc052d..eded92e 100644
--- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
+++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
@@ -987,7 +987,7 @@ public class DummyRawStoreFailEvent implements RawStore, Configurable {
}
@Override
- public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
+ public WMFullResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
return objectStore.getResourcePlan(name);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index b947ab1..cf8386b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -696,14 +696,12 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
// Note: Enhance showResourcePlan to display all the pools, triggers and mappings.
DataOutputStream out = getOutputStream(showResourcePlanDesc.getResFile());
try {
- List<WMResourcePlan> resourcePlans;
String rpName = showResourcePlanDesc.getResourcePlanName();
if (rpName != null) {
- resourcePlans = Collections.singletonList(db.getResourcePlan(rpName));
+ formatter.showFullResourcePlan(out, db.getResourcePlan(rpName));
} else {
- resourcePlans = db.getAllResourcePlans();
+ formatter.showResourcePlans(out, db.getAllResourcePlans());
}
- formatter.showResourcePlans(out, resourcePlans);
} catch (Exception e) {
throw new HiveException(e);
} finally {
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 022ba04..20d7593 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -4758,7 +4758,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
}
}
- public WMResourcePlan getResourcePlan(String rpName) throws HiveException {
+ public WMFullResourcePlan getResourcePlan(String rpName) throws HiveException {
try {
return getMSC().getResourcePlan(rpName);
} catch (NoSuchObjectException e) {
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index a44adf6..035c5fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.metadata.formatting;
+import java.io.Closeable;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
@@ -39,6 +40,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -448,6 +450,101 @@ public class JsonMetaDataFormatter implements MetaDataFormatter {
}
}
+ /**
+ * Formats a resource plan into a JSON object; the structure is as follows:
+ * {
+ * name: "<rp_name>",
+ * parallelism: "<parallelism>",
+ * defaultQueue: "<defaultQueue>",
+ * pools : [
+ * {
+ * name: "<pool_name>",
+ * parallelism: "<parallelism>",
+ * schedulingPolicy: "<policy>",
+ * triggers: [
+ * { name: "<triggerName>", trigger: "<trigExpression>", action: "<actionExpr>" }
+ * ...
+ * ]
+ * }
+ * ...
+ * ]
+ * }
+ */
+ private static class JsonRPFormatter implements MetaDataFormatUtils.RPFormatter, Closeable {
+ private final JsonGenerator generator;
+ private boolean inPool = false;
+
+ JsonRPFormatter(DataOutputStream out) throws IOException {
+ generator = new ObjectMapper().getJsonFactory().createJsonGenerator(out);
+ }
+
+ private void writeNameAndFields(String name, Object ... kvPairs) throws IOException {
+ if (kvPairs.length % 2 != 0) {
+ throw new IllegalArgumentException("Expected pairs");
+ }
+ generator.writeStringField("name", name);
+ for (int i = 0; i < kvPairs.length; i += 2) {
+ generator.writeObjectField(kvPairs[i].toString(), kvPairs[i + 1]);
+ }
+ }
+
+ @Override
+ public void formatRP(String rpName, Object ... kvPairs) throws IOException {
+ generator.writeStartObject();
+ writeNameAndFields(rpName, kvPairs);
+ generator.writeArrayFieldStart("pools");
+ }
+
+ @Override
+ public void formatPool(String poolName, int indentLevel, Object ... kvPairs)
+ throws IOException {
+ if (inPool) {
+ // End the triggers array.
+ generator.writeEndArray();
+ // End the pool object.
+ generator.writeEndObject();
+ } else {
+ inPool = true;
+ }
+ generator.writeStartObject();
+ writeNameAndFields(poolName, kvPairs);
+ generator.writeArrayFieldStart("triggers");
+ // triggers array and pool object left to be ended.
+ }
+
+ @Override
+ public void formatTrigger(String triggerName, String actionExpression, String triggerExpression,
+ int indentLevel) throws IOException {
+ generator.writeStartObject();
+ writeNameAndFields(triggerName, "action", actionExpression, "trigger", triggerExpression);
+ generator.writeEndObject();
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (inPool) {
+ // end the triggers within pool object.
+ generator.writeEndArray();
+ // End the last pool object.
+ generator.writeEndObject();
+ }
+ // End the pools array.
+ generator.writeEndArray();
+ // End the root rp object.
+ generator.writeEndObject();
+ generator.close();
+ }
+ }
+
+ public void showFullResourcePlan(DataOutputStream out, WMFullResourcePlan resourcePlan)
+ throws HiveException {
+ try (JsonRPFormatter formatter = new JsonRPFormatter(out)) {
+ MetaDataFormatUtils.formatFullRP(formatter, resourcePlan);
+ } catch (IOException e) {
+ throw new HiveException(e);
+ }
+ }
+
@Override
public void showErrors(DataOutputStream out, List<String> errors) throws HiveException {
JsonGenerator generator = null;
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index aa1a99d..489842e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -36,9 +36,15 @@ import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMPool;
+import org.apache.hadoop.hive.metastore.api.WMPoolTrigger;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
import org.apache.hadoop.hive.ql.index.HiveIndex;
import org.apache.hadoop.hive.ql.index.HiveIndex.IndexType;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -54,11 +60,13 @@ import org.apache.hive.common.util.HiveStringUtils;
import com.google.common.collect.Lists;
+import java.io.IOException;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@@ -717,4 +725,124 @@ public final class MetaDataFormatUtils {
}
}
+ /**
+ * Interface to implement actual conversion to text or json of a resource plan.
+ */
+ public interface RPFormatter {
+ void formatRP(String rpName, Object ... kvPairs) throws IOException;
+ void formatPool(String poolName, int indentLevel, Object ...kvPairs) throws IOException;
+ void formatTrigger(String triggerName, String actionExpression, String triggerExpression,
+ int indentLevel) throws IOException;
+ }
+
+ /**
+ * An n-ary tree for the pools; each node contains a pool and its children.
+ */
+ private static class PoolTreeNode {
+ private WMPool pool;
+ private final List<PoolTreeNode> children = new ArrayList<>();
+ private final List<WMTrigger> triggers = new ArrayList<>();
+
+ private PoolTreeNode() {}
+
+ private void writePoolTreeNode(RPFormatter rpFormatter, int indentLevel) throws IOException {
+ String path = pool.getPoolPath();
+ int idx = path.lastIndexOf('.');
+ if (idx != -1) {
+ path = path.substring(idx + 1);
+ }
+ Double allocFraction = pool.getAllocFraction();
+ String schedulingPolicy = pool.isSetSchedulingPolicy() ? pool.getSchedulingPolicy() : null;
+ Integer parallelism = pool.getQueryParallelism();
+
+ rpFormatter.formatPool(path, indentLevel, "allocFraction", allocFraction,
+ "schedulingPolicy", schedulingPolicy, "parallelism", parallelism);
+ for (WMTrigger trigger : triggers) {
+ rpFormatter.formatTrigger(trigger.getTriggerName(), trigger.getActionExpression(),
+ trigger.getTriggerExpression(), indentLevel);
+ }
+ for (PoolTreeNode node : children) {
+ node.writePoolTreeNode(rpFormatter, indentLevel + 1);
+ }
+ }
+
+ private void sortChildren() {
+ children.sort((PoolTreeNode p1, PoolTreeNode p2) ->
+ Double.compare(p2.pool.getAllocFraction(), p1.pool.getAllocFraction()));
+ for (PoolTreeNode child : children) {
+ child.sortChildren();
+ }
+ }
+
+ static PoolTreeNode makePoolTree(WMFullResourcePlan fullRp) {
+ Map<String, PoolTreeNode> poolMap = new HashMap<>();
+ PoolTreeNode root = new PoolTreeNode();
+ for (WMPool pool : fullRp.getPools()) {
+ // Create or add node for current pool.
+ String path = pool.getPoolPath();
+ PoolTreeNode curr = poolMap.get(path);
+ if (curr == null) {
+ curr = new PoolTreeNode();
+ poolMap.put(path, curr);
+ }
+ curr.pool = pool;
+
+ // Add this node to the parent node.
+ int ind = path.lastIndexOf('.');
+ PoolTreeNode parent;
+ if (ind == -1) {
+ parent = root;
+ } else {
+ String parentPath = path.substring(0, ind);
+ parent = poolMap.get(parentPath);
+ if (parent == null) {
+ parent = new PoolTreeNode();
+ poolMap.put(parentPath, parent);
+ }
+ }
+ parent.children.add(curr);
+ }
+ Map<String, WMTrigger> triggerMap = new HashMap<>();
+ if (fullRp.getTriggers() != null) {
+ for (WMTrigger trigger : fullRp.getTriggers()) {
+ triggerMap.put(trigger.getTriggerName(), trigger);
+ }
+ }
+ if (fullRp.getPoolTriggers() != null) {
+ for (WMPoolTrigger pool2Trigger : fullRp.getPoolTriggers()) {
+ PoolTreeNode node = poolMap.get(pool2Trigger.getPool());
+ WMTrigger trigger = triggerMap.get(pool2Trigger.getTrigger());
+ if (node == null || trigger == null) {
+ throw new IllegalStateException("Invalid trigger to pool: " + pool2Trigger.getPool() +
+ ", " + pool2Trigger.getTrigger());
+ }
+ node.triggers.add(trigger);
+ }
+ }
+ return root;
+ }
+ }
+
+ private static void writeRPLine(RPFormatter rpFormatter, WMResourcePlan plan)
+ throws IOException {
+ Integer parallelism = plan.isSetQueryParallelism() ? plan.getQueryParallelism() : null;
+ String defaultPool = plan.isSetDefaultPoolPath() ? plan.getDefaultPoolPath() : null;
+ rpFormatter.formatRP(plan.getName(), "status", plan.getStatus().toString(),
+ "parallelism", parallelism, "defaultPool", defaultPool);
+ }
+
+ public static void formatFullRP(RPFormatter rpFormatter, WMFullResourcePlan fullRp)
+ throws HiveException {
+ try {
+ WMResourcePlan plan = fullRp.getPlan();
+ writeRPLine(rpFormatter, plan);
+ PoolTreeNode root = PoolTreeNode.makePoolTree(fullRp);
+ root.sortChildren();
+ for (PoolTreeNode pool : root.children) {
+ pool.writePoolTreeNode(rpFormatter, 1);
+ }
+ } catch (IOException e) {
+ throw new HiveException(e);
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
index 6ba474c..cfc381b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
@@ -27,6 +27,7 @@ import java.util.Set;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -121,10 +122,12 @@ public interface MetaDataFormatter {
String location, String ownerName, String ownerType, Map<String, String> params)
throws HiveException;
- public void showResourcePlans(DataOutputStream out, List<WMResourcePlan> resourcePlans)
+ void showResourcePlans(DataOutputStream out, List<WMResourcePlan> resourcePlans)
throws HiveException;
- public void showErrors(DataOutputStream out, List<String> errors)
+ void showFullResourcePlan(DataOutputStream out, WMFullResourcePlan resourcePlan)
throws HiveException;
+
+ void showErrors(DataOutputStream out, List<String> errors) throws HiveException;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index f3d878d..0f1e893 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.metadata.formatting;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
+import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -40,6 +41,7 @@ import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
@@ -544,24 +546,26 @@ class TextMetaDataFormatter implements MetaDataFormatter {
}
}
+ private static final Charset UTF_8 = Charset.forName("UTF-8");
+
public void showResourcePlans(DataOutputStream out, List<WMResourcePlan> resourcePlans)
throws HiveException {
try {
for (WMResourcePlan plan : resourcePlans) {
- out.write(plan.getName().getBytes("UTF-8"));
+ out.write(plan.getName().getBytes(UTF_8));
out.write(separator);
- out.write(plan.getStatus().name().getBytes("UTF-8"));
+ out.write(plan.getStatus().name().getBytes(UTF_8));
out.write(separator);
if (plan.isSetQueryParallelism()) {
- out.writeBytes(Integer.toString(plan.getQueryParallelism()));
+ out.write(Integer.toString(plan.getQueryParallelism()).getBytes(UTF_8));
} else {
- out.writeBytes("null");
+ out.write("null".getBytes(UTF_8));
}
out.write(separator);
if (plan.isSetDefaultPoolPath()) {
- out.write(plan.getDefaultPoolPath().getBytes("UTF-8"));
+ out.write(plan.getDefaultPoolPath().getBytes(UTF_8));
} else {
- out.writeBytes("null");
+ out.write("null".getBytes(UTF_8));
}
out.write(terminator);
}
@@ -570,6 +574,83 @@ class TextMetaDataFormatter implements MetaDataFormatter {
}
}
+ /**
+ * Class to print text records for resource plans in the following format:
+ *
+ * <rp_name>[status=<STATUS>,parallelism=<parallelism>,defaultPool=<defaultPool>]
+ * <queue_name>[allocFraction=<fraction>,schedulingPolicy=<policy>,parallelism=<parallelism>]
+ * > <trigger_name>: if(<triggerExpression>){<actionExpression>}
+ */
+ private static class TextRPFormatter implements MetaDataFormatUtils.RPFormatter {
+ private final DataOutputStream out;
+
+ TextRPFormatter(DataOutputStream out) {
+ this.out = out;
+ }
+
+ @Override
+ public void formatRP(String rpName, Object ... kvPairs) throws IOException {
+ out.write(rpName.getBytes(UTF_8));
+ writeFields(kvPairs);
+ out.write(terminator);
+ }
+
+ private static final byte[] INDENT = " ".getBytes(UTF_8);
+
+ @Override
+ public void formatPool(String poolName, int indentLevel, Object ... kvPairs)
+ throws IOException {
+ for (int i = 0; i < indentLevel; ++i) {
+ out.write(INDENT);
+ }
+ out.write(poolName.getBytes(UTF_8));
+ writeFields(kvPairs);
+ out.write(terminator);
+ }
+
+ private void writeFields(Object ... kvPairs)
+ throws IOException {
+ if (kvPairs.length % 2 != 0) {
+ throw new IllegalArgumentException("Expected pairs, got: " + kvPairs.length);
+ }
+ if (kvPairs.length < 2) {
+ return;
+ }
+ out.write('[');
+ out.write(kvPairs[0].toString().getBytes(UTF_8));
+ out.write('=');
+ out.write((kvPairs[1] == null ? "null" : kvPairs[1].toString()).getBytes(UTF_8));
+ for (int i = 2; i < kvPairs.length; i += 2) {
+ out.write(',');
+ out.write(kvPairs[i].toString().getBytes(UTF_8));
+ out.write('=');
+ out.write((kvPairs[i + 1] == null ? "null" : kvPairs[i + 1].toString()).getBytes(UTF_8));
+ }
+ out.write(']');
+ }
+
+ @Override
+ public void formatTrigger(String triggerName, String actionExpression, String triggerExpression,
+ int indentLevel) throws IOException {
+ for (int i = 0; i < indentLevel; ++i) {
+ out.write(INDENT);
+ }
+ out.write(" > ".getBytes(UTF_8));
+ out.write(triggerName.getBytes(UTF_8));
+ out.write(": if(".getBytes(UTF_8));
+ out.write(triggerExpression.getBytes(UTF_8));
+ out.write("){".getBytes(UTF_8));
+ out.write(actionExpression.getBytes(UTF_8));
+ out.write('}');
+ out.write(terminator);
+ }
+ }
+
+ public void showFullResourcePlan(DataOutputStream out, WMFullResourcePlan fullResourcePlan)
+ throws HiveException {
+ MetaDataFormatUtils.formatFullRP(new TextRPFormatter(out), fullResourcePlan);
+ }
+
public void showErrors(DataOutputStream out, List<String> errors) throws HiveException {
try {
for (String error : errors) {
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index a092d3d..c413bc5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hive.ql.parse;
-import org.apache.hadoop.hive.ql.exec.tez.WorkloadManager;
-
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.antlr.runtime.tree.CommonTree;
@@ -937,7 +935,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
ShowResourcePlanDesc showResourcePlanDesc = new ShowResourcePlanDesc(rpName, ctx.getResFile());
rootTasks.add(TaskFactory.get(
new DDLWork(getInputs(), getOutputs(), showResourcePlanDesc), conf));
- setFetchTask(createFetchTask(showResourcePlanDesc.getSchema()));
+ setFetchTask(createFetchTask(showResourcePlanDesc.getSchema(rpName)));
}
private void analyzeAlterResourcePlan(ASTNode ast) throws SemanticException {
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java
index 0b4cfb5..36aeabb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java
@@ -23,12 +23,14 @@ import java.io.Serializable;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.plan.Explain.Level;
-@Explain(displayName = "Show Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+@Explain(displayName = "Show Resource plans",
+ explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class ShowResourcePlanDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 6076076933035978545L;
- private static final String table = "show_resourceplan";
- private static final String schema = "rp_name,status,query_parallelism#string,string,int";
+ private static final String TABLE = "show_resourceplan";
+ private static final String ALL_SCHEMA = "rp_name,status,query_parallelism#string,string,int";
+ private static final String SINGLE_SCHEMA = "line#string";
String resFile;
String resourcePlanName;
@@ -50,16 +52,17 @@ public class ShowResourcePlanDesc extends DDLDesc implements Serializable {
this.resFile = resFile;
}
- @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+ @Explain(displayName="resourcePlanName",
+ explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getResourcePlanName() {
return resourcePlanName;
}
public String getTable() {
- return table;
+ return TABLE;
}
- public String getSchema() {
- return schema;
+ public String getSchema(String rpName) {
+ return (rpName == null) ? ALL_SCHEMA : SINGLE_SCHEMA;
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/test/org/apache/hadoop/hive/ql/metadata/formatting/TestJsonRPFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/formatting/TestJsonRPFormatter.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/formatting/TestJsonRPFormatter.java
new file mode 100644
index 0000000..f2b6890
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/formatting/TestJsonRPFormatter.java
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata.formatting;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.util.ArrayList;
+
+import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMPool;
+import org.apache.hadoop.hive.metastore.api.WMPoolTrigger;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
+import org.apache.hadoop.hive.metastore.api.WMTrigger;
+import org.codehaus.jackson.JsonNode;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test class for json resource plan formatter.
+ */
+public class TestJsonRPFormatter {
+ private final JsonMetaDataFormatter formatter = new JsonMetaDataFormatter();
+
+ private ByteArrayOutputStream bos;
+ private DataOutputStream out;
+
+ @Before
+ public void setup() {
+ bos = new ByteArrayOutputStream();
+ out = new DataOutputStream(bos);
+ }
+
+ @After
+ public void teardown() throws Exception {
+ out.close();
+ bos.close();
+ }
+
+ private WMFullResourcePlan createRP(String name, Integer parallelism, String defaultPoolPath) {
+ WMResourcePlan rp = new WMResourcePlan(name);
+ rp.setStatus(WMResourcePlanStatus.ACTIVE);
+ if (parallelism != null) {
+ rp.setQueryParallelism(parallelism);
+ }
+ if (defaultPoolPath != null) {
+ rp.setDefaultPoolPath(defaultPoolPath);
+ }
+ WMFullResourcePlan fullRp = new WMFullResourcePlan(rp, new ArrayList<>());
+ return fullRp;
+ }
+
+ private void addPool(WMFullResourcePlan fullRp, String poolName, double allocFraction,
+ int parallelism, String policy) {
+ WMPool pool = new WMPool(fullRp.getPlan().getName(), poolName);
+ pool.setAllocFraction(allocFraction);
+ pool.setQueryParallelism(parallelism);
+ if (policy != null) {
+ pool.setSchedulingPolicy(policy);
+ }
+ fullRp.addToPools(pool);
+ }
+
+ private void addTrigger(WMFullResourcePlan fullRp, String triggerName, String action,
+ String expr, String poolName) {
+ WMTrigger trigger = new WMTrigger(fullRp.getPlan().getName(), triggerName);
+ trigger.setActionExpression(action);
+ trigger.setTriggerExpression(expr);
+ fullRp.addToTriggers(trigger);
+
+ WMPoolTrigger pool2Trigger = new WMPoolTrigger(poolName, triggerName);
+ fullRp.addToPoolTriggers(pool2Trigger);
+ }
+
+ @Test
+ public void testJsonEmptyRPFormatter() throws Exception {
+ WMFullResourcePlan fullRp = createRP("test_rp_1", null, null);
+ formatter.showFullResourcePlan(out, fullRp);
+ out.flush();
+
+ ObjectMapper objectMapper = new ObjectMapper();
+ JsonNode jsonTree = objectMapper.readTree(bos.toByteArray());
+
+ assertNotNull(jsonTree);
+ assertTrue(jsonTree.isObject());
+ assertEquals("test_rp_1", jsonTree.get("name").asText());
+ assertTrue(jsonTree.get("parallelism").isNull());
+ assertTrue(jsonTree.get("defaultPool").isNull());
+ assertTrue(jsonTree.get("pools").isArray());
+ assertEquals(0, jsonTree.get("pools").size());
+ }
+
+ @Test
+ public void testJsonRPFormatter() throws Exception {
+ WMFullResourcePlan fullRp = createRP("test_rp_2", 10, "def");
+ addPool(fullRp, "pool1", 0.3, 3, "fair");
+ addTrigger(fullRp, "trigger1", "KILL", "BYTES > 2", "pool1");
+ addPool(fullRp, "pool2", 0.7, 7, "fcfs");
+ formatter.showFullResourcePlan(out, fullRp);
+ out.flush();
+
+ ObjectMapper objectMapper = new ObjectMapper();
+ JsonNode jsonTree = objectMapper.readTree(bos.toByteArray());
+
+ assertNotNull(jsonTree);
+ assertTrue(jsonTree.isObject());
+ assertEquals("test_rp_2", jsonTree.get("name").asText());
+ assertEquals(10, jsonTree.get("parallelism").asInt());
+ assertEquals("def", jsonTree.get("defaultPool").asText());
+ assertTrue(jsonTree.get("pools").isArray());
+ assertEquals(2, jsonTree.get("pools").size());
+
+ JsonNode pool2 = jsonTree.get("pools").get(0);
+ assertEquals("pool2", pool2.get("name").asText());
+ assertEquals("fcfs", pool2.get("schedulingPolicy").asText());
+ assertEquals(7, pool2.get("parallelism").asInt());
+ assertEquals(0.7, pool2.get("allocFraction").asDouble(), 0.00001);
+ assertTrue(pool2.get("triggers").isArray());
+ assertEquals(0, pool2.get("triggers").size());
+
+ JsonNode pool1 = jsonTree.get("pools").get(1);
+ assertEquals("pool1", pool1.get("name").asText());
+ assertEquals("fair", pool1.get("schedulingPolicy").asText());
+ assertEquals(3, pool1.get("parallelism").asInt());
+ assertEquals(0.3, pool1.get("allocFraction").asDouble(), 0.00001);
+ assertTrue(pool1.get("triggers").isArray());
+ assertEquals(1, pool1.get("triggers").size());
+
+ JsonNode trigger1 = pool1.get("triggers").get(0);
+ assertEquals("trigger1", trigger1.get("name").asText());
+ assertEquals("KILL", trigger1.get("action").asText());
+ assertEquals("BYTES > 2", trigger1.get("trigger").asText());
+ }
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/test/queries/clientpositive/resourceplan.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/resourceplan.q b/ql/src/test/queries/clientpositive/resourceplan.q
index a4a9b7c..ce684ed 100644
--- a/ql/src/test/queries/clientpositive/resourceplan.q
+++ b/ql/src/test/queries/clientpositive/resourceplan.q
@@ -330,6 +330,8 @@ CREATE RESOURCE PLAN plan_4;
ALTER RESOURCE PLAN plan_4 ENABLE ACTIVATE;
+SHOW RESOURCE PLAN plan_2;
+
-- This should remove all pools, triggers & mappings.
DROP RESOURCE PLAN plan_2;
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/ql/src/test/results/clientpositive/llap/resourceplan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/resourceplan.q.out b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
index 94cf877..29c01a8 100644
--- a/ql/src/test/results/clientpositive/llap/resourceplan.q.out
+++ b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
@@ -3158,7 +3158,8 @@ PREHOOK: query: SHOW RESOURCE PLAN plan_1
PREHOOK: type: SHOW RESOURCEPLAN
POSTHOOK: query: SHOW RESOURCE PLAN plan_1
POSTHOOK: type: SHOW RESOURCEPLAN
-plan_1 DISABLED
+plan_1[status=DISABLED,parallelism=null,defaultPool=default]
+ default[allocFraction=1.0,schedulingPolicy=null,parallelism=4]
PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
PREHOOK: type: QUERY
PREHOOK: Input: sys@wm_resourceplans
@@ -3182,7 +3183,8 @@ PREHOOK: query: SHOW RESOURCE PLAN plan_2
PREHOOK: type: SHOW RESOURCEPLAN
POSTHOOK: query: SHOW RESOURCE PLAN plan_2
POSTHOOK: type: SHOW RESOURCEPLAN
-plan_2 DISABLED 4
+plan_2[status=DISABLED,parallelism=4,defaultPool=default]
+ default[allocFraction=1.0,schedulingPolicy=null,parallelism=4]
PREHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
PREHOOK: type: QUERY
PREHOOK: Input: sys@wm_resourceplans
@@ -4079,6 +4081,15 @@ PREHOOK: query: ALTER RESOURCE PLAN plan_4 ENABLE ACTIVATE
PREHOOK: type: ALTER RESOURCEPLAN
POSTHOOK: query: ALTER RESOURCE PLAN plan_4 ENABLE ACTIVATE
POSTHOOK: type: ALTER RESOURCEPLAN
+PREHOOK: query: SHOW RESOURCE PLAN plan_2
+PREHOOK: type: SHOW RESOURCEPLAN
+POSTHOOK: query: SHOW RESOURCE PLAN plan_2
+POSTHOOK: type: SHOW RESOURCEPLAN
+plan_2[status=DISABLED,parallelism=4,defaultPool=def]
+ def[allocFraction=1.0,schedulingPolicy=null,parallelism=4]
+ c2[allocFraction=0.7,schedulingPolicy=fair,parallelism=1]
+ > trigger_1: if(BYTES_READ = 0){MOVE TO null_pool}
+ c1[allocFraction=0.3,schedulingPolicy=fair,parallelism=3]
PREHOOK: query: DROP RESOURCE PLAN plan_2
PREHOOK: type: DROP RESOURCEPLAN
POSTHOOK: query: DROP RESOURCE PLAN plan_2
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
index ac7597a..87a5c29 100644
--- a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
+++ b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
@@ -22257,7 +22257,7 @@ WMGetResourcePlanResponse::~WMGetResourcePlanResponse() throw() {
}
-void WMGetResourcePlanResponse::__set_resourcePlan(const WMResourcePlan& val) {
+void WMGetResourcePlanResponse::__set_resourcePlan(const WMFullResourcePlan& val) {
this->resourcePlan = val;
__isset.resourcePlan = true;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
index 442255c..74ce8d3 100644
--- a/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
+++ b/standalone-metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
@@ -9184,11 +9184,11 @@ class WMGetResourcePlanResponse {
}
virtual ~WMGetResourcePlanResponse() throw();
- WMResourcePlan resourcePlan;
+ WMFullResourcePlan resourcePlan;
_WMGetResourcePlanResponse__isset __isset;
- void __set_resourcePlan(const WMResourcePlan& val);
+ void __set_resourcePlan(const WMFullResourcePlan& val);
bool operator == (const WMGetResourcePlanResponse & rhs) const
{
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WMGetResourcePlanResponse.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WMGetResourcePlanResponse.java b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WMGetResourcePlanResponse.java
index 638728e..7341dee 100644
--- a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WMGetResourcePlanResponse.java
+++ b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WMGetResourcePlanResponse.java
@@ -46,7 +46,7 @@ import org.slf4j.LoggerFactory;
schemes.put(TupleScheme.class, new WMGetResourcePlanResponseTupleSchemeFactory());
}
- private WMResourcePlan resourcePlan; // optional
+ private WMFullResourcePlan resourcePlan; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -112,7 +112,7 @@ import org.slf4j.LoggerFactory;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.RESOURCE_PLAN, new org.apache.thrift.meta_data.FieldMetaData("resourcePlan", org.apache.thrift.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WMResourcePlan.class)));
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WMFullResourcePlan.class)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(WMGetResourcePlanResponse.class, metaDataMap);
}
@@ -125,7 +125,7 @@ import org.slf4j.LoggerFactory;
*/
public WMGetResourcePlanResponse(WMGetResourcePlanResponse other) {
if (other.isSetResourcePlan()) {
- this.resourcePlan = new WMResourcePlan(other.resourcePlan);
+ this.resourcePlan = new WMFullResourcePlan(other.resourcePlan);
}
}
@@ -138,11 +138,11 @@ import org.slf4j.LoggerFactory;
this.resourcePlan = null;
}
- public WMResourcePlan getResourcePlan() {
+ public WMFullResourcePlan getResourcePlan() {
return this.resourcePlan;
}
- public void setResourcePlan(WMResourcePlan resourcePlan) {
+ public void setResourcePlan(WMFullResourcePlan resourcePlan) {
this.resourcePlan = resourcePlan;
}
@@ -167,7 +167,7 @@ import org.slf4j.LoggerFactory;
if (value == null) {
unsetResourcePlan();
} else {
- setResourcePlan((WMResourcePlan)value);
+ setResourcePlan((WMFullResourcePlan)value);
}
break;
@@ -328,7 +328,7 @@ import org.slf4j.LoggerFactory;
switch (schemeField.id) {
case 1: // RESOURCE_PLAN
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
- struct.resourcePlan = new WMResourcePlan();
+ struct.resourcePlan = new WMFullResourcePlan();
struct.resourcePlan.read(iprot);
struct.setResourcePlanIsSet(true);
} else {
@@ -387,7 +387,7 @@ import org.slf4j.LoggerFactory;
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
- struct.resourcePlan = new WMResourcePlan();
+ struct.resourcePlan = new WMFullResourcePlan();
struct.resourcePlan.read(iprot);
struct.setResourcePlanIsSet(true);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php b/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
index ab1887e..74f39ff 100644
--- a/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
+++ b/standalone-metastore/src/gen/thrift/gen-php/metastore/Types.php
@@ -21841,7 +21841,7 @@ class WMGetResourcePlanResponse {
static $_TSPEC;
/**
- * @var \metastore\WMResourcePlan
+ * @var \metastore\WMFullResourcePlan
*/
public $resourcePlan = null;
@@ -21851,7 +21851,7 @@ class WMGetResourcePlanResponse {
1 => array(
'var' => 'resourcePlan',
'type' => TType::STRUCT,
- 'class' => '\metastore\WMResourcePlan',
+ 'class' => '\metastore\WMFullResourcePlan',
),
);
}
@@ -21883,7 +21883,7 @@ class WMGetResourcePlanResponse {
{
case 1:
if ($ftype == TType::STRUCT) {
- $this->resourcePlan = new \metastore\WMResourcePlan();
+ $this->resourcePlan = new \metastore\WMFullResourcePlan();
$xfer += $this->resourcePlan->read($input);
} else {
$xfer += $input->skip($ftype);
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 1fbe1a1..8787326 100644
--- a/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/standalone-metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -15457,7 +15457,7 @@ class WMGetResourcePlanResponse:
thrift_spec = (
None, # 0
- (1, TType.STRUCT, 'resourcePlan', (WMResourcePlan, WMResourcePlan.thrift_spec), None, ), # 1
+ (1, TType.STRUCT, 'resourcePlan', (WMFullResourcePlan, WMFullResourcePlan.thrift_spec), None, ), # 1
)
def __init__(self, resourcePlan=None,):
@@ -15474,7 +15474,7 @@ class WMGetResourcePlanResponse:
break
if fid == 1:
if ftype == TType.STRUCT:
- self.resourcePlan = WMResourcePlan()
+ self.resourcePlan = WMFullResourcePlan()
self.resourcePlan.read(iprot)
else:
iprot.skip(ftype)
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index a090f79..46be4fb 100644
--- a/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -3496,7 +3496,7 @@ class WMGetResourcePlanResponse
RESOURCEPLAN = 1
FIELDS = {
- RESOURCEPLAN => {:type => ::Thrift::Types::STRUCT, :name => 'resourcePlan', :class => ::WMResourcePlan, :optional => true}
+ RESOURCEPLAN => {:type => ::Thrift::Types::STRUCT, :name => 'resourcePlan', :class => ::WMFullResourcePlan, :optional => true}
}
def struct_fields; FIELDS; end
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index f1b58c5..a1eeb29 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -7307,7 +7307,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
public WMGetResourcePlanResponse get_resource_plan(WMGetResourcePlanRequest request)
throws NoSuchObjectException, MetaException, TException {
try {
- WMResourcePlan rp = getMS().getResourcePlan(request.getResourcePlanName());
+ WMFullResourcePlan rp = getMS().getResourcePlan(request.getResourcePlanName());
WMGetResourcePlanResponse resp = new WMGetResourcePlanResponse();
resp.setResourcePlan(rp);
return resp;
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 2b6b0b6..16d08b1 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -2613,7 +2613,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
}
@Override
- public WMResourcePlan getResourcePlan(String resourcePlanName)
+ public WMFullResourcePlan getResourcePlan(String resourcePlanName)
throws NoSuchObjectException, MetaException, TException {
WMGetResourcePlanRequest request = new WMGetResourcePlanRequest();
request.setResourcePlanName(resourcePlanName);
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 6905bd4..4d68217 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -1774,7 +1774,7 @@ public interface IMetaStoreClient {
void createResourcePlan(WMResourcePlan resourcePlan, String copyFromName)
throws InvalidObjectException, MetaException, TException;
- WMResourcePlan getResourcePlan(String resourcePlanName)
+ WMFullResourcePlan getResourcePlan(String resourcePlanName)
throws NoSuchObjectException, MetaException, TException;
List<WMResourcePlan> getAllResourcePlans()
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index b708fae..5bfa624 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.metastore;
import static org.apache.commons.lang.StringUtils.join;
import static org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier;
-import java.util.Random;
import com.google.common.collect.Sets;
import org.apache.hadoop.hive.metastore.api.WMPoolTrigger;
import org.apache.hadoop.hive.metastore.api.WMMapping;
@@ -9702,12 +9701,18 @@ public class ObjectStore implements RawStore, Configurable {
}
@Override
- public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
+ public WMFullResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
+ boolean commited = false;
try {
- return fromMResourcePlan(getMWMResourcePlan(name, false));
+ openTransaction();
+ WMFullResourcePlan fullRp = fullFromMResourcePlan(getMWMResourcePlan(name, false));
+ commited = commitTransaction();
+ return fullRp;
} catch (InvalidOperationException e) {
// Should not happen, edit check is false.
throw new RuntimeException(e);
+ } finally {
+ rollbackAndCleanup(commited, (Query)null);
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
index 8af96db..fa77f63 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -756,7 +756,7 @@ public interface RawStore extends Configurable {
void createResourcePlan(WMResourcePlan resourcePlan, String copyFrom, int defaultPoolSize)
throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException;
- WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException, MetaException;
+ WMFullResourcePlan getResourcePlan(String name) throws NoSuchObjectException, MetaException;
List<WMResourcePlan> getAllResourcePlans() throws MetaException;
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
index 9856f8a..e1be6b9 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
@@ -2386,7 +2386,8 @@ public class CachedStore implements RawStore, Configurable {
}
@Override
- public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException, MetaException {
+ public WMFullResourcePlan getResourcePlan(String name)
+ throws NoSuchObjectException, MetaException {
return rawStore.getResourcePlan(name);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/main/thrift/hive_metastore.thrift
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/thrift/hive_metastore.thrift b/standalone-metastore/src/main/thrift/hive_metastore.thrift
index c0f8b11..6eb8fd6 100644
--- a/standalone-metastore/src/main/thrift/hive_metastore.thrift
+++ b/standalone-metastore/src/main/thrift/hive_metastore.thrift
@@ -1110,7 +1110,7 @@ struct WMGetResourcePlanRequest {
}
struct WMGetResourcePlanResponse {
- 1: optional WMResourcePlan resourcePlan;
+ 1: optional WMFullResourcePlan resourcePlan;
}
struct WMGetAllResourcePlanRequest {
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index e59e349..2aa5551 100644
--- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -949,7 +949,7 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
}
@Override
- public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
+ public WMFullResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
return objectStore.getResourcePlan(name);
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a6b88d9d/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index 8be099c..4ec5864 100644
--- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -960,7 +960,7 @@ public class DummyRawStoreForJdoConnection implements RawStore {
}
@Override
- public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
+ public WMFullResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
return null;
}