You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by se...@apache.org on 2017/10/21 01:05:09 UTC
[12/12] hive git commit: HIVE-17771 : Implement commands to manage
resource plan (Harish Jaiprakash, reviewed by Sergey Shelukhin)
HIVE-17771 : Implement commands to manage resource plan (Harish Jaiprakash, reviewed by Sergey Shelukhin)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a7e34455
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a7e34455
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a7e34455
Branch: refs/heads/master
Commit: a7e344554aaa3fdb88fd4355dad10f5ca53fa3cf
Parents: bb851ca
Author: sergey <se...@apache.org>
Authored: Fri Oct 20 18:04:44 2017 -0700
Committer: sergey <se...@apache.org>
Committed: Fri Oct 20 18:04:44 2017 -0700
----------------------------------------------------------------------
hcatalog/webhcat/java-client/pom.xml | 2 +-
.../listener/DummyRawStoreFailEvent.java | 34 +
.../test/resources/testconfiguration.properties | 1 +
.../hadoop/hive/metastore/HiveMetaStore.java | 96 +-
.../hive/metastore/HiveMetaStoreClient.java | 50 +-
.../hadoop/hive/metastore/IMetaStoreClient.java | 18 +
.../DummyRawStoreControlledCommit.java | 34 +
.../DummyRawStoreForJdoConnection.java | 33 +-
.../org/apache/hadoop/hive/ql/exec/DDLTask.java | 84 +-
.../apache/hadoop/hive/ql/metadata/Hive.java | 55 +-
.../formatting/JsonMetaDataFormatter.java | 30 +
.../metadata/formatting/MetaDataFormatter.java | 4 +
.../formatting/TextMetaDataFormatter.java | 23 +-
.../hive/ql/parse/DDLSemanticAnalyzer.java | 105 +-
.../org/apache/hadoop/hive/ql/parse/HiveLexer.g | 5 +
.../apache/hadoop/hive/ql/parse/HiveParser.g | 52 +
.../hive/ql/parse/SemanticAnalyzerFactory.java | 8 +
.../hive/ql/plan/AlterResourcePlanDesc.java | 91 +
.../hive/ql/plan/CreateResourcePlanDesc.java | 51 +
.../org/apache/hadoop/hive/ql/plan/DDLWork.java | 63 +
.../hive/ql/plan/DropResourcePlanDesc.java | 28 +
.../hadoop/hive/ql/plan/HiveOperation.java | 6 +-
.../hive/ql/plan/ShowResourcePlanDesc.java | 65 +
.../authorization/plugin/HiveOperationType.java | 5 +
.../test/queries/clientpositive/resourceplan.q | 106 +
.../clientpositive/llap/resourceplan.q.out | 3181 +++
.../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp | 25172 +++++++++--------
.../gen/thrift/gen-cpp/ThriftHiveMetastore.h | 1334 +-
.../ThriftHiveMetastore_server.skeleton.cpp | 30 +
.../gen/thrift/gen-cpp/hive_metastore_types.cpp | 1658 +-
.../gen/thrift/gen-cpp/hive_metastore_types.h | 572 +-
.../hive/metastore/api/ThriftHiveMetastore.java | 9114 +++++-
.../api/WMAlterResourcePlanRequest.java | 504 +
.../api/WMAlterResourcePlanResponse.java | 283 +
.../api/WMCreateResourcePlanRequest.java | 398 +
.../api/WMCreateResourcePlanResponse.java | 283 +
.../api/WMDropResourcePlanRequest.java | 393 +
.../api/WMDropResourcePlanResponse.java | 283 +
.../api/WMGetAllResourcePlanRequest.java | 283 +
.../api/WMGetAllResourcePlanResponse.java | 447 +
.../metastore/api/WMGetResourcePlanRequest.java | 393 +
.../api/WMGetResourcePlanResponse.java | 398 +
.../hive/metastore/api/WMResourcePlan.java | 36 +-
.../metastore/api/WMResourcePlanStatus.java | 48 +
.../api/WMValidateResourcePlanRequest.java | 393 +
.../api/WMValidateResourcePlanResponse.java | 390 +
.../gen-php/metastore/ThriftHiveMetastore.php | 3010 +-
.../src/gen/thrift/gen-php/metastore/Types.php | 889 +-
.../src/gen/thrift/gen-py/__init__.py | 0
.../hive_metastore/ThriftHiveMetastore-remote | 42 +
.../hive_metastore/ThriftHiveMetastore.py | 5708 ++--
.../gen/thrift/gen-py/hive_metastore/ttypes.py | 756 +-
.../gen/thrift/gen-rb/hive_metastore_types.rb | 203 +-
.../gen/thrift/gen-rb/thrift_hive_metastore.rb | 406 +
.../hadoop/hive/metastore/ObjectStore.java | 226 +
.../apache/hadoop/hive/metastore/RawStore.java | 16 +
.../hive/metastore/cache/CachedStore.java | 34 +
.../src/main/thrift/hive_metastore.thrift | 78 +-
58 files changed, 42204 insertions(+), 15806 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/hcatalog/webhcat/java-client/pom.xml
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/java-client/pom.xml b/hcatalog/webhcat/java-client/pom.xml
index 0c9fc15..ea51854 100644
--- a/hcatalog/webhcat/java-client/pom.xml
+++ b/hcatalog/webhcat/java-client/pom.xml
@@ -91,7 +91,7 @@
</dependencies>
<build>
<sourceDirectory>${basedir}/src/main/java</sourceDirectory>
- <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
+ <testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
----------------------------------------------------------------------
diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
index 5bd146d..431186e 100644
--- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
+++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -57,6 +58,7 @@ import org.apache.hadoop.hive.metastore.api.PartitionValuesResponse;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
@@ -974,4 +976,36 @@ public class DummyRawStoreFailEvent implements RawStore, Configurable {
public String getMetastoreDbUuid() throws MetaException {
throw new MetaException("getMetastoreDbUuid is not implemented");
}
+
+ @Override
+ public void createResourcePlan(WMResourcePlan resourcePlan) throws MetaException {
+ objectStore.createResourcePlan(resourcePlan);
+ }
+
+ @Override
+ public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
+ return objectStore.getResourcePlan(name);
+ }
+
+ @Override
+ public List<WMResourcePlan> getAllResourcePlans() throws MetaException {
+ return objectStore.getAllResourcePlans();
+ }
+
+ @Override
+ public void alterResourcePlan(String name, WMResourcePlan resourcePlan)
+ throws NoSuchObjectException, InvalidOperationException, MetaException {
+ objectStore.alterResourcePlan(name, resourcePlan);
+ }
+
+ @Override
+ public boolean validateResourcePlan(String name)
+ throws NoSuchObjectException, InvalidObjectException, MetaException {
+ return objectStore.validateResourcePlan(name);
+ }
+
+ @Override
+ public void dropResourcePlan(String name) throws NoSuchObjectException, MetaException {
+ objectStore.dropResourcePlan(name);
+ }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 06ebc98..a081638 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -568,6 +568,7 @@ minillaplocal.query.files=\
ptf.q,\
ptf_streaming.q,\
quotedid_smb.q,\
+ resourceplan.q,\
sample10.q,\
schema_evol_orc_acid_part.q,\
schema_evol_orc_acid_part_update.q,\
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 23033fa..6783e5b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -25,8 +25,6 @@ import static org.apache.hadoop.hive.metastore.MetaStoreUtils.validateName;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.PrivilegedExceptionAction;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
@@ -162,8 +160,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.apache.commons.lang.StringUtils.join;
-
import com.facebook.fb303.FacebookBase;
import com.facebook.fb303.fb_status;
import com.google.common.annotations.VisibleForTesting;
@@ -3052,11 +3048,11 @@ public class HiveMetaStore extends ThriftHiveMetastore {
throw new RuntimeException(e);
}
- partFutures.add(threadPool.submit(new Callable() {
- @Override public Object call() throws Exception {
- ugi.doAs(new PrivilegedExceptionAction<Object>() {
+ partFutures.add(threadPool.submit(new Callable<Partition>() {
+ @Override public Partition call() throws Exception {
+ ugi.doAs(new PrivilegedExceptionAction<Partition>() {
@Override
- public Object run() throws Exception {
+ public Partition run() throws Exception {
try {
boolean madeDir = createLocationForAddedPartition(table, part);
if (addedPartitions.put(new PartValEqWrapperLite(part), madeDir) != null) {
@@ -3592,14 +3588,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
Table tbl = null;
List<Partition> parts = null;
boolean mustPurge = false;
- boolean isExternalTbl = false;
List<Map<String, String>> transactionalListenerResponses = Lists.newArrayList();
try {
// We need Partition-s for firing events and for result; DN needs MPartition-s to drop.
// Great... Maybe we could bypass fetching MPartitions by issuing direct SQL deletes.
tbl = get_table_core(dbName, tblName);
- isExternalTbl = isExternal(tbl);
+ isExternal(tbl);
mustPurge = isMustPurge(envContext, tbl);
int minCount = 0;
RequestPartsSpec spec = request.getParts();
@@ -4533,7 +4528,6 @@ public class HiveMetaStore extends ThriftHiveMetastore {
}
}
- @SuppressWarnings("deprecation")
Deserializer s = MetaStoreUtils.getDeserializer(curConf, tbl, false);
ret = MetaStoreUtils.getFieldsFromDeserializer(tableName, s);
} catch (SerDeException e) {
@@ -7386,6 +7380,84 @@ public class HiveMetaStore extends ThriftHiveMetastore {
throw e;
}
}
+
+
+ @Override
+ public WMCreateResourcePlanResponse create_resource_plan(WMCreateResourcePlanRequest request)
+ throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
+ try {
+ getMS().createResourcePlan(request.getResourcePlan());
+ return new WMCreateResourcePlanResponse();
+ } catch (MetaException e) {
+ LOG.error("Exception while trying to persist resource plan", e);
+ throw e;
+ }
+ }
+
+ @Override
+ public WMGetResourcePlanResponse get_resource_plan(WMGetResourcePlanRequest request)
+ throws NoSuchObjectException, MetaException, TException {
+ try {
+ WMResourcePlan rp = getMS().getResourcePlan(request.getResourcePlanName());
+ WMGetResourcePlanResponse resp = new WMGetResourcePlanResponse();
+ resp.setResourcePlan(rp);
+ return resp;
+ } catch (MetaException e) {
+ LOG.error("Exception while trying to retrieve resource plan", e);
+ throw e;
+ }
+ }
+
+ @Override
+ public WMGetAllResourcePlanResponse get_all_resource_plans(WMGetAllResourcePlanRequest request)
+ throws MetaException, TException {
+ try {
+ WMGetAllResourcePlanResponse resp = new WMGetAllResourcePlanResponse();
+ resp.setResourcePlans(getMS().getAllResourcePlans());
+ return resp;
+ } catch (MetaException e) {
+ LOG.error("Exception while trying to retrieve resource plans", e);
+ throw e;
+ }
+ }
+
+ @Override
+ public WMAlterResourcePlanResponse alter_resource_plan(WMAlterResourcePlanRequest request)
+ throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
+ try {
+ getMS().alterResourcePlan(request.getResourcePlanName(), request.getResourcePlan());
+ return new WMAlterResourcePlanResponse();
+ } catch (MetaException e) {
+ LOG.error("Exception while trying to alter resource plan", e);
+ throw e;
+ }
+ }
+
+ @Override
+ public WMValidateResourcePlanResponse validate_resource_plan(WMValidateResourcePlanRequest request)
+ throws NoSuchObjectException, MetaException, TException {
+ try {
+ boolean isValid = getMS().validateResourcePlan(request.getResourcePlanName());
+ WMValidateResourcePlanResponse resp = new WMValidateResourcePlanResponse();
+ resp.setIsValid(isValid);
+ return resp;
+ } catch (MetaException e) {
+ LOG.error("Exception while trying to validate resource plan", e);
+ throw e;
+ }
+ }
+
+ @Override
+ public WMDropResourcePlanResponse drop_resource_plan(WMDropResourcePlanRequest request)
+ throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
+ try {
+ getMS().dropResourcePlan(request.getResourcePlanName());
+ return new WMDropResourcePlanResponse();
+ } catch (MetaException e) {
+ LOG.error("Exception while trying to drop resource plan", e);
+ throw e;
+ }
+ }
}
public static IHMSHandler newRetryingHMSHandler(IHMSHandler baseHandler, HiveConf hiveConf)
@@ -7975,4 +8047,4 @@ public class HiveMetaStore extends ThriftHiveMetastore {
}
return fmHandlers;
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 3f5f80e..0e24ba7 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -208,7 +208,7 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
}
// make metastore URIS random
- List uriList = Arrays.asList(metastoreUris);
+ List<?> uriList = Arrays.asList(metastoreUris);
Collections.shuffle(uriList);
metastoreUris = (URI[]) uriList.toArray();
} catch (IllegalArgumentException e) {
@@ -2635,4 +2635,52 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
public String getMetastoreDbUuid() throws TException {
return client.get_metastore_db_uuid();
}
+
+ @Override
+ public void createResourcePlan(WMResourcePlan resourcePlan)
+ throws InvalidObjectException, MetaException, TException {
+ WMCreateResourcePlanRequest request = new WMCreateResourcePlanRequest();
+ request.setResourcePlan(resourcePlan);
+ client.create_resource_plan(request);
+ }
+
+ @Override
+ public WMResourcePlan getResourcePlan(String resourcePlanName)
+ throws NoSuchObjectException, MetaException, TException {
+ WMGetResourcePlanRequest request = new WMGetResourcePlanRequest();
+ request.setResourcePlanName(resourcePlanName);
+ return client.get_resource_plan(request).getResourcePlan();
+ }
+
+ @Override
+ public List<WMResourcePlan> getAllResourcePlans()
+ throws NoSuchObjectException, MetaException, TException {
+ WMGetAllResourcePlanRequest request = new WMGetAllResourcePlanRequest();
+ return client.get_all_resource_plans(request).getResourcePlans();
+ }
+
+ @Override
+ public void dropResourcePlan(String resourcePlanName)
+ throws NoSuchObjectException, MetaException, TException {
+ WMDropResourcePlanRequest request = new WMDropResourcePlanRequest();
+ request.setResourcePlanName(resourcePlanName);
+ client.drop_resource_plan(request);
+ }
+
+ @Override
+ public void alterResourcePlan(String resourcePlanName, WMResourcePlan resourcePlan)
+ throws NoSuchObjectException, InvalidObjectException, MetaException, TException {
+ WMAlterResourcePlanRequest request = new WMAlterResourcePlanRequest();
+ request.setResourcePlanName(resourcePlanName);
+ request.setResourcePlan(resourcePlan);
+ client.alter_resource_plan(request);
+ }
+
+ @Override
+ public boolean validateResourcePlan(String resourcePlanName)
+ throws NoSuchObjectException, InvalidObjectException, MetaException, TException {
+ WMValidateResourcePlanRequest request = new WMValidateResourcePlanRequest();
+ request.setResourcePlanName(resourcePlanName);
+ return client.validate_resource_plan(request).isIsValid();
+ }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index a08fc72..62a3735 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -86,6 +86,7 @@ import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
@@ -1766,4 +1767,21 @@ public interface IMetaStoreClient {
*/
String getMetastoreDbUuid() throws MetaException, TException;
+ void createResourcePlan(WMResourcePlan resourcePlan)
+ throws InvalidObjectException, MetaException, TException;
+
+ WMResourcePlan getResourcePlan(String resourcePlanName)
+ throws NoSuchObjectException, MetaException, TException;
+
+ List<WMResourcePlan> getAllResourcePlans()
+ throws NoSuchObjectException, MetaException, TException;
+
+ void dropResourcePlan(String resourcePlanName)
+ throws NoSuchObjectException, MetaException, TException;
+
+ void alterResourcePlan(String resourcePlanName, WMResourcePlan resourcePlan)
+ throws NoSuchObjectException, InvalidObjectException, MetaException, TException;
+
+ boolean validateResourcePlan(String resourcePlanName)
+ throws NoSuchObjectException, InvalidObjectException, MetaException, TException;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index a75dbb0..a6f4ee2 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -52,6 +53,7 @@ import org.apache.hadoop.hive.metastore.api.PartitionValuesResponse;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
@@ -933,4 +935,36 @@ public class DummyRawStoreControlledCommit implements RawStore, Configurable {
public String getMetastoreDbUuid() throws MetaException {
throw new MetaException("Get metastore uuid is not implemented");
}
+
+ @Override
+ public void createResourcePlan(WMResourcePlan resourcePlan) throws MetaException {
+ objectStore.createResourcePlan(resourcePlan);
+ }
+
+ @Override
+ public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
+ return objectStore.getResourcePlan(name);
+ }
+
+ @Override
+ public List<WMResourcePlan> getAllResourcePlans() throws MetaException {
+ return objectStore.getAllResourcePlans();
+ }
+
+ @Override
+ public void alterResourcePlan(String name, WMResourcePlan resourcePlan)
+ throws NoSuchObjectException, InvalidOperationException, MetaException {
+ objectStore.alterResourcePlan(name, resourcePlan);
+ }
+
+ @Override
+ public boolean validateResourcePlan(String name)
+ throws NoSuchObjectException, InvalidObjectException, MetaException {
+ return objectStore.validateResourcePlan(name);
+ }
+
+ @Override
+ public void dropResourcePlan(String name) throws NoSuchObjectException, MetaException {
+ objectStore.dropResourcePlan(name);
+ }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index f388066..320ade1 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -23,7 +23,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
-import junit.framework.Assert;
+import org.junit.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -53,6 +54,7 @@ import org.apache.hadoop.hive.metastore.api.PartitionValuesResponse;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
@@ -946,4 +948,33 @@ public class DummyRawStoreForJdoConnection implements RawStore {
public String getMetastoreDbUuid() throws MetaException {
throw new MetaException("Get metastore uuid is not implemented");
}
+
+ @Override
+ public void createResourcePlan(WMResourcePlan resourcePlan) throws MetaException {
+ }
+
+ @Override
+ public WMResourcePlan getResourcePlan(String name) throws NoSuchObjectException {
+ return null;
+ }
+
+ @Override
+ public List<WMResourcePlan> getAllResourcePlans() throws MetaException {
+ return null;
+ }
+
+ @Override
+ public void alterResourcePlan(String name, WMResourcePlan resourcePlan)
+ throws NoSuchObjectException, InvalidOperationException, MetaException {
+ }
+
+ @Override
+ public boolean validateResourcePlan(String name)
+ throws NoSuchObjectException, InvalidObjectException, MetaException {
+ return false;
+ }
+
+ @Override
+ public void dropResourcePlan(String name) throws NoSuchObjectException, MetaException {
+ }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 88a2cdd..65c9091 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -59,7 +59,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.common.StatsSetupConst;
-import org.apache.hadoop.hive.common.ValidReadTxnList;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.conf.Constants;
@@ -88,6 +87,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
@@ -161,6 +161,7 @@ import org.apache.hadoop.hive.ql.plan.AbortTxnsDesc;
import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
+import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
@@ -170,6 +171,7 @@ import org.apache.hadoop.hive.ql.plan.CacheMetadataDesc;
import org.apache.hadoop.hive.ql.plan.ColStatistics;
import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
+import org.apache.hadoop.hive.ql.plan.CreateResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
@@ -179,6 +181,7 @@ import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
import org.apache.hadoop.hive.ql.plan.DescTableDesc;
import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
+import org.apache.hadoop.hive.ql.plan.DropResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.plan.FileMergeDesc;
import org.apache.hadoop.hive.ql.plan.GrantDesc;
@@ -212,6 +215,7 @@ import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
import org.apache.hadoop.hive.ql.plan.ShowIndexesDesc;
import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
import org.apache.hadoop.hive.ql.plan.ShowTblPropertiesDesc;
@@ -607,6 +611,22 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
if (killQueryDesc != null) {
return killQuery(db, killQueryDesc);
}
+
+ if (work.getCreateResourcePlanDesc() != null) {
+ return createResourcePlan(db, work.getCreateResourcePlanDesc());
+ }
+
+ if (work.getShowResourcePlanDesc() != null) {
+ return showResourcePlans(db, work.getShowResourcePlanDesc());
+ }
+
+ if (work.getAlterResourcePlanDesc() != null) {
+ return alterResourcePlan(db, work.getAlterResourcePlanDesc());
+ }
+
+ if (work.getDropResourcePlanDesc() != null) {
+ return dropResourcePlan(db, work.getDropResourcePlanDesc());
+ }
} catch (Throwable e) {
failed(e);
return 1;
@@ -615,6 +635,68 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
return 0;
}
+ private int createResourcePlan(Hive db, CreateResourcePlanDesc createResourcePlanDesc)
+ throws HiveException {
+ WMResourcePlan resourcePlan = new WMResourcePlan();
+ resourcePlan.setName(createResourcePlanDesc.getName());
+ if (createResourcePlanDesc.getQueryParallelism() != null) {
+ resourcePlan.setQueryParallelism(createResourcePlanDesc.getQueryParallelism());
+ }
+ db.createResourcePlan(resourcePlan);
+ return 0;
+ }
+
+ private int showResourcePlans(Hive db, ShowResourcePlanDesc showResourcePlanDesc)
+ throws HiveException {
+ // Note: Enhance showResourcePlan to display all the pools, triggers and mappings.
+ DataOutputStream out = getOutputStream(showResourcePlanDesc.getResFile());
+ try {
+ List<WMResourcePlan> resourcePlans;
+ String rpName = showResourcePlanDesc.getResourcePlanName();
+ if (rpName != null) {
+ resourcePlans = Collections.singletonList(db.getResourcePlan(rpName));
+ } else {
+ resourcePlans = db.geAllResourcePlans();
+ }
+ formatter.showResourcePlans(out, resourcePlans);
+ } catch (Exception e) {
+ throw new HiveException(e);
+ } finally {
+ IOUtils.closeStream(out);
+ }
+ return 0;
+ }
+
+ private int alterResourcePlan(Hive db, AlterResourcePlanDesc desc) throws HiveException {
+ if (desc.shouldValidate()) {
+ return db.validateResourcePlan(desc.getRpName()) ? 0 : 1;
+ }
+
+ WMResourcePlan resourcePlan = new WMResourcePlan();
+
+ if (desc.getNewName() != null) {
+ resourcePlan.setName(desc.getNewName());
+ } else {
+ resourcePlan.setName(desc.getRpName());
+ }
+
+ if (desc.getQueryParallelism() != null) {
+ resourcePlan.setQueryParallelism(desc.getQueryParallelism());
+ }
+
+ if (desc.getStatus() != null) {
+ resourcePlan.setStatus(desc.getStatus());
+ }
+
+ db.alterResourcePlan(desc.getRpName(), resourcePlan);
+ return 0;
+ }
+
+ private int dropResourcePlan(Hive db, DropResourcePlanDesc desc) throws HiveException {
+ db.dropResourcePlan(desc.getRpName());
+ return 0;
+ }
+
private int preInsertWork(Hive db, PreInsertTableDesc preInsertTableDesc) throws HiveException {
try{
HiveMetaHook hook = preInsertTableDesc.getTable().getStorageHandler().getMetaHook();
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index f31d7f8..ed9d576 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -53,9 +53,6 @@ import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableMap;
-
import javax.jdo.JDODataStoreException;
import org.apache.calcite.plan.RelOptMaterialization;
@@ -64,7 +61,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hive.common.FileUtils;
@@ -122,6 +118,7 @@ import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
@@ -4698,4 +4695,54 @@ private void constructOneLBLocationMap(FileStatus fSta,
throw new HiveException(e);
}
}
+
/**
 * Creates a new resource plan in the metastore.
 *
 * @param resourcePlan plan definition (name and optional query parallelism).
 * @throws HiveException if the metastore call fails.
 */
public void createResourcePlan(WMResourcePlan resourcePlan) throws HiveException {
  try {
    getMSC().createResourcePlan(resourcePlan);
  } catch (Exception e) {
    // Match the rest of this class: wrap metastore failures in HiveException.
    throw new HiveException(e);
  }
}
+
/**
 * Fetches the named resource plan from the metastore.
 *
 * @param rpName resource plan name.
 * @return the plan, or null if no plan with that name exists.
 * @throws HiveException on any other metastore failure.
 */
public WMResourcePlan getResourcePlan(String rpName) throws HiveException {
  try {
    return getMSC().getResourcePlan(rpName);
  } catch (NoSuchObjectException e) {
    // A missing plan is an expected case; callers check for null.
    return null;
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
+
+ public List<WMResourcePlan> geAllResourcePlans() throws HiveException {
+ try {
+ return getMSC().getAllResourcePlans();
+ } catch (Exception e) {
+ throw new HiveException(e);
+ }
+ }
+
/**
 * Drops the named resource plan from the metastore.
 *
 * @param rpName resource plan name.
 * @throws HiveException if the metastore call fails.
 */
public void dropResourcePlan(String rpName) throws HiveException {
  try {
    getMSC().dropResourcePlan(rpName);
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
+
/**
 * Alters the named resource plan in the metastore.
 *
 * @param rpName       name of the existing plan to alter.
 * @param resourcePlan new plan state (may carry a new name for renames).
 * @throws HiveException if the metastore call fails.
 */
public void alterResourcePlan(String rpName, WMResourcePlan resourcePlan) throws HiveException {
  try {
    getMSC().alterResourcePlan(rpName, resourcePlan);
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
+
/**
 * Asks the metastore to validate the named resource plan.
 *
 * @param rpName resource plan name.
 * @return true if the plan is valid, false otherwise.
 * @throws HiveException if the metastore call fails.
 */
public boolean validateResourcePlan(String rpName) throws HiveException {
  try {
    return getMSC().validateResourcePlan(rpName);
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
};
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index bdf1b26..f1258ba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -28,6 +28,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
+import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -38,6 +39,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -46,6 +48,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.UniqueConstraint;
+import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;
/**
@@ -414,4 +417,31 @@ public class JsonMetaDataFormatter implements MetaDataFormatter {
}
asJson(out, builder.build());
}
+
+ @Override
+ public void showResourcePlans(DataOutputStream out, List<WMResourcePlan> resourcePlans)
+ throws HiveException {
+ JsonGenerator generator = null;
+ try {
+ generator = new ObjectMapper().getJsonFactory().createJsonGenerator(out);
+ generator.writeStartArray();
+ for (WMResourcePlan plan : resourcePlans) {
+ generator.writeStartObject();
+ generator.writeStringField("name", plan.getName());
+ generator.writeStringField("status", plan.getStatus().name());
+ if (plan.isSetQueryParallelism()) {
+ generator.writeNumberField("queryParallelism", plan.getQueryParallelism());
+ }
+ generator.writeEndObject();
+ }
+ generator.writeEndArray();
+ generator.close();
+ } catch (IOException e) {
+ throw new HiveException(e);
+ } finally {
+ if (generator != null) {
+ IOUtils.closeQuietly(generator);
+ }
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
index ac5b306..405acdd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
@@ -27,6 +27,7 @@ import java.util.Set;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -119,5 +120,8 @@ public interface MetaDataFormatter {
public void showDatabaseDescription (DataOutputStream out, String database, String comment,
String location, String ownerName, String ownerType, Map<String, String> params)
throws HiveException;
+
+ public void showResourcePlans(DataOutputStream out, List<WMResourcePlan> resourcePlans)
+ throws HiveException;
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index 765aa65..9df1b82 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hive.ql.metadata.formatting;
import java.io.DataOutputStream;
-import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
@@ -41,6 +40,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -543,4 +543,25 @@ class TextMetaDataFormatter implements MetaDataFormatter {
throw new HiveException(e);
}
}
+
+ public void showResourcePlans(DataOutputStream out, List<WMResourcePlan> resourcePlans)
+ throws HiveException {
+ try {
+ for (WMResourcePlan plan : resourcePlans) {
+ out.write(plan.getName().getBytes("UTF-8"));
+ out.write(separator);
+ if (plan.isSetQueryParallelism()) {
+ out.writeBytes(Integer.toString(plan.getQueryParallelism()));
+ } else {
+ out.writeBytes("null");
+ }
+ out.write(separator);
+ out.write(plan.getStatus().name().getBytes("UTF-8"));
+ out.write(terminator);
+ }
+ } catch (IOException e) {
+ throw new HiveException(e);
+ }
+ }
+
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 941a067..dc29b12 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -48,7 +48,7 @@ import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryState;
@@ -86,6 +86,7 @@ import org.apache.hadoop.hive.ql.plan.AddPartitionDesc.OnePartitionDesc;
import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
+import org.apache.hadoop.hive.ql.plan.AlterResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableAlterPartDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;
@@ -95,12 +96,14 @@ import org.apache.hadoop.hive.ql.plan.CacheMetadataDesc;
import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
import org.apache.hadoop.hive.ql.plan.CreateDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.CreateIndexDesc;
+import org.apache.hadoop.hive.ql.plan.CreateResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DescDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
import org.apache.hadoop.hive.ql.plan.DescTableDesc;
import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
import org.apache.hadoop.hive.ql.plan.DropIndexDesc;
+import org.apache.hadoop.hive.ql.plan.DropResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -129,6 +132,7 @@ import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
import org.apache.hadoop.hive.ql.plan.ShowIndexesDesc;
import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowResourcePlanDesc;
import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
import org.apache.hadoop.hive.ql.plan.ShowTablesDesc;
import org.apache.hadoop.hive.ql.plan.ShowTblPropertiesDesc;
@@ -542,6 +546,19 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
case HiveParser.TOK_CACHE_METADATA:
analyzeCacheMetadata(ast);
break;
+ case HiveParser.TOK_CREATERESOURCEPLAN:
+ analyzeCreateResourcePlan(ast);
+ break;
+ case HiveParser.TOK_SHOWRESOURCEPLAN:
+ ctx.setResFile(ctx.getLocalTmpPath());
+ analyzeShowResourcePlan(ast);
+ break;
+ case HiveParser.TOK_ALTER_RP:
+ analyzeAlterResourcePlan(ast);
+ break;
+ case HiveParser.TOK_DROP_RP:
+ analyzeDropResourcePlan(ast);
+ break;
default:
throw new SemanticException("Unsupported command: " + ast);
}
@@ -821,6 +838,92 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
return counter;
}
+ private void analyzeCreateResourcePlan(ASTNode ast) throws SemanticException {
+ if (ast.getChildCount() == 0) {
+ throw new SemanticException("Expected name in CREATE RESOURCE PLAN statement");
+ }
+ String resourcePlanName = unescapeIdentifier(ast.getChild(0).getText());
+ Integer queryParallelism = null;
+ if (ast.getChildCount() > 1) {
+ queryParallelism = Integer.parseInt(ast.getChild(1).getText());
+ }
+ if (ast.getChildCount() > 2) {
+ throw new SemanticException("Invalid token in CREATE RESOURCE PLAN statement");
+ }
+ CreateResourcePlanDesc desc = new CreateResourcePlanDesc(resourcePlanName, queryParallelism);
+ rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc), conf));
+ }
+
+ private void analyzeShowResourcePlan(ASTNode ast) throws SemanticException {
+ String rpName = null;
+ if (ast.getChildCount() > 0) {
+ rpName = unescapeIdentifier(ast.getChild(0).getText());
+ }
+ if (ast.getChildCount() > 1) {
+ throw new SemanticException("Invalid syntax for SHOW RESOURCE PLAN statement");
+ }
+ ShowResourcePlanDesc showResourcePlanDesc = new ShowResourcePlanDesc(rpName, ctx.getResFile());
+ rootTasks.add(TaskFactory.get(
+ new DDLWork(getInputs(), getOutputs(), showResourcePlanDesc), conf));
+ setFetchTask(createFetchTask(showResourcePlanDesc.getSchema()));
+ }
+
+ private void analyzeAlterResourcePlan(ASTNode ast) throws SemanticException {
+ if (ast.getChildCount() == 0) {
+ throw new SemanticException("Expected name in ALTER RESOURCE PLAN statement");
+ }
+ String rpName = unescapeIdentifier(ast.getChild(0).getText());
+ if (ast.getChildCount() < 2) {
+ throw new SemanticException("Invalid syntax for ALTER RESOURCE PLAN statement");
+ }
+ AlterResourcePlanDesc desc;
+ switch (ast.getChild(1).getType()) {
+ case HiveParser.TOK_VALIDATE:
+ desc = AlterResourcePlanDesc.createValidatePlan(rpName);
+ break;
+ case HiveParser.TOK_ACTIVATE:
+ desc = AlterResourcePlanDesc.createChangeStatus(rpName, WMResourcePlanStatus.ACTIVE);
+ break;
+ case HiveParser.TOK_ENABLE:
+ desc = AlterResourcePlanDesc.createChangeStatus(rpName, WMResourcePlanStatus.ENABLED);
+ break;
+ case HiveParser.TOK_DISABLE:
+ desc = AlterResourcePlanDesc.createChangeStatus(rpName, WMResourcePlanStatus.DISABLED);
+ break;
+ case HiveParser.TOK_QUERY_PARALLELISM:
+ if (ast.getChildCount() != 3) {
+ throw new SemanticException(
+ "Expected number for query parallelism in alter resource plan statment");
+ }
+ int queryParallelism = Integer.parseInt(ast.getChild(2).getText());
+ desc = AlterResourcePlanDesc.createChangeParallelism(rpName, queryParallelism);
+ break;
+ case HiveParser.TOK_RENAME:
+ if (ast.getChildCount() != 3) {
+ throw new SemanticException(
+ "Expected new name for rename in alter resource plan statment");
+ }
+ String name = ast.getChild(2).getText();
+ desc = AlterResourcePlanDesc.createRenamePlan(rpName, name);
+ break;
+ default:
+ throw new SemanticException("Unexpected token in alter resource plan statement: "
+ + ast.getChild(1).getType());
+ }
+ rootTasks.add(TaskFactory.get(
+ new DDLWork(getInputs(), getOutputs(), desc), conf));
+ }
+
+ private void analyzeDropResourcePlan(ASTNode ast) throws SemanticException {
+ if (ast.getChildCount() == 0) {
+ throw new SemanticException("Expected name in DROP RESOURCE PLAN statement");
+ }
+ String rpName = unescapeIdentifier(ast.getChild(0).getText());
+ DropResourcePlanDesc desc = new DropResourcePlanDesc(rpName);
+ rootTasks.add(TaskFactory.get(
+ new DDLWork(getInputs(), getOutputs(), desc), conf));
+ }
+
private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
String dbName = unescapeIdentifier(ast.getChild(0).getText());
boolean ifNotExists = false;
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 74f9d95..99ed71a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -353,6 +353,11 @@ KW_OPERATOR: 'OPERATOR';
KW_EXPRESSION: 'EXPRESSION';
KW_DETAIL: 'DETAIL';
KW_WAIT: 'WAIT';
+KW_RESOURCE: 'RESOURCE';
+KW_PLAN: 'PLAN';
+KW_QUERY_PARALLELISM: 'QUERY_PARALLELISM';
+KW_PLANS: 'PLANS';
+KW_ACTIVATE: 'ACTIVATE';
// Operators
// NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index a2e55ed..053393c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -405,6 +405,14 @@ TOK_EXPRESSION;
TOK_DETAIL;
TOK_BLOCKING;
TOK_KILL_QUERY;
+TOK_CREATERESOURCEPLAN;
+TOK_SHOWRESOURCEPLAN;
+TOK_ALTER_RP;
+TOK_DROP_RP;
+TOK_VALIDATE;
+TOK_ACTIVATE;
+TOK_QUERY_PARALLELISM;
+TOK_RENAME;
}
@@ -577,6 +585,11 @@ import org.apache.hadoop.hive.conf.HiveConf;
xlateMap.put("KW_WAIT", "WAIT");
xlateMap.put("KW_KILL", "KILL");
xlateMap.put("KW_QUERY", "QUERY");
+ xlateMap.put("KW_RESOURCE", "RESOURCE");
+ xlateMap.put("KW_PLAN", "PLAN");
+ xlateMap.put("KW_QUERY_PARALLELISM", "QUERY_PARALLELISM");
+ xlateMap.put("KW_PLANS", "PLANS");
+ xlateMap.put("KW_ACTIVATE", "ACTIVATE");
// Operators
xlateMap.put("DOT", ".");
@@ -915,6 +928,9 @@ ddlStatement
| showCurrentRole
| abortTransactionStatement
| killQueryStatement
+ | createResourcePlanStatement
+ | alterResourcePlanStatement
+ | dropResourcePlanStatement
;
ifExists
@@ -968,6 +984,37 @@ orReplace
-> ^(TOK_ORREPLACE)
;
// CREATE RESOURCE PLAN <name> [WITH QUERY_PARALLELISM <n>]
createResourcePlanStatement
@init { pushMsg("create resource plan statement", state); }
@after { popMsg(state); }
    : KW_CREATE KW_RESOURCE KW_PLAN
      name=identifier
      (KW_WITH KW_QUERY_PARALLELISM parallelism=Number)?
    -> ^(TOK_CREATERESOURCEPLAN $name $parallelism?)
    ;
+
// ALTER RESOURCE PLAN <name> VALIDATE | ACTIVATE | ENABLE | DISABLE
//   | SET QUERY_PARALLELISM = <n> | RENAME TO <newName>
alterResourcePlanStatement
@init { pushMsg("alter resource plan statement", state); }
@after { popMsg(state); }
    : KW_ALTER KW_RESOURCE KW_PLAN name=identifier (
        (KW_VALIDATE -> ^(TOK_ALTER_RP $name TOK_VALIDATE))
      | (KW_ACTIVATE -> ^(TOK_ALTER_RP $name TOK_ACTIVATE))
      | (KW_ENABLE -> ^(TOK_ALTER_RP $name TOK_ENABLE))
      | (KW_DISABLE -> ^(TOK_ALTER_RP $name TOK_DISABLE))
      | (KW_SET KW_QUERY_PARALLELISM EQUAL parallelism=Number
          -> ^(TOK_ALTER_RP $name TOK_QUERY_PARALLELISM $parallelism))
      | (KW_RENAME KW_TO newName=identifier
          -> ^(TOK_ALTER_RP $name TOK_RENAME $newName))
      )
    ;
+
// DROP RESOURCE PLAN <name>
dropResourcePlanStatement
@init { pushMsg("drop resource plan statement", state); }
@after { popMsg(state); }
    : KW_DROP KW_RESOURCE KW_PLAN name=identifier
    -> ^(TOK_DROP_RP $name)
    ;
+
createDatabaseStatement
@init { pushMsg("create database statement", state); }
@after { popMsg(state); }
@@ -1595,6 +1642,11 @@ showStatement
| KW_SHOW KW_COMPACTIONS -> ^(TOK_SHOW_COMPACTIONS)
| KW_SHOW KW_TRANSACTIONS -> ^(TOK_SHOW_TRANSACTIONS)
| KW_SHOW KW_CONF StringLiteral -> ^(TOK_SHOWCONF StringLiteral)
+ | KW_SHOW KW_RESOURCE
+ (
+ (KW_PLAN rp_name=identifier -> ^(TOK_SHOWRESOURCEPLAN $rp_name))
+ | (KW_PLANS -> ^(TOK_SHOWRESOURCEPLAN))
+ )
;
lockStatement
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index f481308..e9f6b55 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -134,6 +134,10 @@ public final class SemanticAnalyzerFactory {
commandType.put(HiveParser.TOK_REPL_LOAD, HiveOperation.REPLLOAD);
commandType.put(HiveParser.TOK_REPL_STATUS, HiveOperation.REPLSTATUS);
commandType.put(HiveParser.TOK_KILL_QUERY, HiveOperation.KILL_QUERY);
+ commandType.put(HiveParser.TOK_CREATERESOURCEPLAN, HiveOperation.CREATE_RESOURCEPLAN);
+ commandType.put(HiveParser.TOK_SHOWRESOURCEPLAN, HiveOperation.SHOW_RESOURCEPLAN);
+ commandType.put(HiveParser.TOK_ALTER_RP, HiveOperation.ALTER_RESOURCEPLAN);
+ commandType.put(HiveParser.TOK_DROP_RP, HiveOperation.DROP_RESOURCEPLAN);
}
static {
@@ -309,6 +313,10 @@ public final class SemanticAnalyzerFactory {
case HiveParser.TOK_SHOW_SET_ROLE:
case HiveParser.TOK_CACHE_METADATA:
case HiveParser.TOK_KILL_QUERY:
+ case HiveParser.TOK_CREATERESOURCEPLAN:
+ case HiveParser.TOK_SHOWRESOURCEPLAN:
+ case HiveParser.TOK_ALTER_RP:
+ case HiveParser.TOK_DROP_RP:
return new DDLSemanticAnalyzer(queryState);
case HiveParser.TOK_CREATEFUNCTION:
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java
new file mode 100644
index 0000000..f0658a6
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterResourcePlanDesc.java
@@ -0,0 +1,91 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;

import org.apache.hadoop.hive.metastore.api.WMResourcePlanStatus;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

/**
 * Descriptor for ALTER RESOURCE PLAN. Exactly one alteration is populated,
 * via the static factory methods: validate, status change, parallelism
 * change, or rename.
 */
@Explain(displayName = "Alter Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class AlterResourcePlanDesc extends DDLDesc implements Serializable {
  private static final long serialVersionUID = -3514685833183437279L;

  // Name of the resource plan being altered.
  private String rpName;
  // New name when this is a rename; null otherwise.
  private String newName;
  // New query parallelism when set; null otherwise.
  private Integer queryParallelism;
  // New status when this is a status change; null otherwise.
  private WMResourcePlanStatus status;
  // True when this is a VALIDATE request; no change is applied then.
  private boolean validate;

  // For serialization only.
  public AlterResourcePlanDesc() {}

  private AlterResourcePlanDesc(String rpName, String newName, Integer queryParallelism,
      WMResourcePlanStatus status, boolean validate) {
    this.rpName = rpName;
    this.newName = newName;
    this.queryParallelism = queryParallelism;
    this.status = status;
    this.validate = validate;
  }

  /** Creates a descriptor that changes the plan's query parallelism. */
  public static AlterResourcePlanDesc createChangeParallelism(String rpName,
      int queryParallelism) {
    return new AlterResourcePlanDesc(rpName, null, queryParallelism, null, false);
  }

  /** Creates a descriptor that changes the plan's status. */
  public static AlterResourcePlanDesc createChangeStatus(
      String rpName, WMResourcePlanStatus status) {
    return new AlterResourcePlanDesc(rpName, null, null, status, false);
  }

  /** Creates a descriptor that only validates the plan. */
  public static AlterResourcePlanDesc createValidatePlan(String rpName) {
    return new AlterResourcePlanDesc(rpName, null, null, null, true);
  }

  /** Creates a descriptor that renames the plan. */
  public static AlterResourcePlanDesc createRenamePlan(String rpName, String newName) {
    return new AlterResourcePlanDesc(rpName, newName, null, null, false);
  }

  @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getRpName() {
    return rpName;
  }

  public void setRpName(String rpName) {
    this.rpName = rpName;
  }

  @Explain(displayName="newResourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getNewName() {
    return newName;
  }

  public void setNewName(String newName) {
    this.newName = newName;
  }

  @Explain(displayName="queryParallelism", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public Integer getQueryParallelism() {
    return queryParallelism;
  }

  public void setQueryParallelism(Integer queryParallelism) {
    this.queryParallelism = queryParallelism;
  }

  @Explain(displayName="status", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public WMResourcePlanStatus getStatus() {
    return status;
  }

  public void setStatus(WMResourcePlanStatus status) {
    this.status = status;
  }

  @Explain(displayName="shouldValidate", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public boolean shouldValidate() {
    return validate;
  }

  public void setValidate(boolean validate) {
    this.validate = validate;
  }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
new file mode 100644
index 0000000..348e315
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
@@ -0,0 +1,51 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;

import org.apache.hadoop.hive.ql.plan.Explain.Level;

/**
 * Descriptor for CREATE RESOURCE PLAN: the plan name and an optional query
 * parallelism. Fields are populated by the constructor (or serialization) and
 * are read-only afterwards.
 */
@Explain(displayName = "Create ResourcePlan", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class CreateResourcePlanDesc extends DDLDesc implements Serializable {

  private static final long serialVersionUID = -3649343104271794404L;

  // Name of the plan to create.
  private String planName;
  // Optional query parallelism; null when not specified.
  private Integer queryParallelism;

  // For serialization only.
  public CreateResourcePlanDesc() {
  }

  public CreateResourcePlanDesc(String planName, Integer queryParallelism) {
    this.planName = planName;
    this.queryParallelism = queryParallelism;
  }

  @Explain(displayName="name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getName() {
    return planName;
  }

  // Added explainLevels for consistency with getName() and the other
  // resource-plan descriptors in this package.
  @Explain(displayName="queryParallelism", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public Integer getQueryParallelism() {
    return queryParallelism;
  }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index 0b7c559..cfd2115 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -86,6 +86,11 @@ public class DDLWork implements Serializable {
private ShowConfDesc showConfDesc;
+ private CreateResourcePlanDesc createResourcePlanDesc;
+ private ShowResourcePlanDesc showResourcePlanDesc;
+ private DropResourcePlanDesc dropResourcePlanDesc;
+ private AlterResourcePlanDesc alterResourcePlanDesc;
+
boolean needLock = false;
/**
@@ -547,6 +552,30 @@ public class DDLWork implements Serializable {
this.killQueryDesc = killQueryDesc;
}
/** DDL work wrapping a CREATE RESOURCE PLAN descriptor. */
public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
    CreateResourcePlanDesc createResourcePlanDesc) {
  this(inputs, outputs);
  this.createResourcePlanDesc = createResourcePlanDesc;
}
+
/** DDL work wrapping a SHOW RESOURCE PLAN descriptor. */
public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
    ShowResourcePlanDesc showResourcePlanDesc) {
  this(inputs, outputs);
  this.showResourcePlanDesc = showResourcePlanDesc;
}
+
/** DDL work wrapping a DROP RESOURCE PLAN descriptor. */
public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
    DropResourcePlanDesc dropResourcePlanDesc) {
  this(inputs, outputs);
  // Assign the field directly, matching the style of the sibling constructors.
  this.dropResourcePlanDesc = dropResourcePlanDesc;
}
+
/** DDL work wrapping an ALTER RESOURCE PLAN descriptor. */
public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
    AlterResourcePlanDesc alterResourcePlanDesc) {
  this(inputs, outputs);
  // Assign the field directly, matching the style of the sibling constructors.
  this.alterResourcePlanDesc = alterResourcePlanDesc;
}
+
/**
* @return Create Database descriptor
*/
@@ -1235,4 +1264,38 @@ public class DDLWork implements Serializable {
public void setPreInsertTableDesc(PreInsertTableDesc preInsertTableDesc) {
this.preInsertTableDesc = preInsertTableDesc;
}
+
/** @return the CREATE RESOURCE PLAN descriptor, or null if this work is not one. */
@Explain(displayName = "Create resource plan")
public CreateResourcePlanDesc getCreateResourcePlanDesc() {
  return createResourcePlanDesc;
}
+
/** Sets the CREATE RESOURCE PLAN descriptor. */
public void setCreateResourcePlanDesc(CreateResourcePlanDesc createResourcePlanDesc) {
  this.createResourcePlanDesc = createResourcePlanDesc;
}
+
/** @return the SHOW RESOURCE PLAN descriptor, or null if this work is not one. */
@Explain(displayName = "Show resource plan")
public ShowResourcePlanDesc getShowResourcePlanDesc() {
  return showResourcePlanDesc;
}
+
/** Sets the SHOW RESOURCE PLAN descriptor. */
public void setShowResourcePlanDesc(ShowResourcePlanDesc showResourcePlanDesc) {
  this.showResourcePlanDesc = showResourcePlanDesc;
}
+
+ public DropResourcePlanDesc getDropResourcePlanDesc() {
+ return dropResourcePlanDesc;
+ }
+
/** Sets the DROP RESOURCE PLAN descriptor. */
public void setDropResourcePlanDesc(DropResourcePlanDesc dropResourcePlanDesc) {
  this.dropResourcePlanDesc = dropResourcePlanDesc;
}
+
+ public AlterResourcePlanDesc getAlterResourcePlanDesc() {
+ return alterResourcePlanDesc;
+ }
+
/** Sets the ALTER RESOURCE PLAN descriptor. */
public void setAlterResourcePlanDesc(AlterResourcePlanDesc alterResourcePlanDesc) {
  this.alterResourcePlanDesc = alterResourcePlanDesc;
}
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/plan/DropResourcePlanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropResourcePlanDesc.java
new file mode 100644
index 0000000..7fafdea
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropResourcePlanDesc.java
@@ -0,0 +1,28 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;

import org.apache.hadoop.hive.ql.plan.Explain.Level;

/** Descriptor for DROP RESOURCE PLAN: carries only the plan name. */
@Explain(displayName = "Drop Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class DropResourcePlanDesc extends DDLDesc implements Serializable {
  private static final long serialVersionUID = 1258596919510047766L;

  // Name of the plan to drop.
  private String rpName;

  // For serialization only.
  public DropResourcePlanDesc() {}

  public DropResourcePlanDesc(String rpName) {
    this.setRpName(rpName);
  }

  @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getRpName() {
    return rpName;
  }

  public void setRpName(String rpName) {
    this.rpName = rpName;
  }

}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index 0f69de2..dc4866e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -139,7 +139,11 @@ public enum HiveOperation {
ROLLBACK("ROLLBACK", null, null, true, true),
SET_AUTOCOMMIT("SET AUTOCOMMIT", null, null, true, false),
ABORT_TRANSACTIONS("ABORT TRANSACTIONS", null, null, false, false),
- KILL_QUERY("KILL QUERY", null, null);
+ KILL_QUERY("KILL QUERY", null, null),
+ CREATE_RESOURCEPLAN("CREATE RESOURCEPLAN", null, null, false, false),
+ SHOW_RESOURCEPLAN("SHOW RESOURCEPLAN", null, null, false, false),
+ ALTER_RESOURCEPLAN("ALTER RESOURCEPLAN", null, null, false, false),
+ DROP_RESOURCEPLAN("DROP RESOURCEPLAN", null, null, false, false);
private String operationName;
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java
new file mode 100644
index 0000000..0b4cfb5
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowResourcePlanDesc.java
@@ -0,0 +1,65 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.plan;

import java.io.Serializable;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.plan.Explain.Level;

/**
 * Descriptor for the SHOW RESOURCE PLAN(S) DDL statement.
 * Holds the (optional) plan name being shown and the path of the result
 * file the formatted output is written to.
 */
@Explain(displayName = "Show Resource plans", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public class ShowResourcePlanDesc extends DDLDesc implements Serializable {
  private static final long serialVersionUID = 6076076933035978545L;

  // Table name and fetch schema used by the formatter for the result set.
  private static final String TABLE = "show_resourceplan";
  private static final String SCHEMA = "rp_name,status,query_parallelism#string,string,int";

  private String resFile;
  private String resourcePlanName;

  /** For serialization only. */
  public ShowResourcePlanDesc() {}

  /**
   * @param rpName resource plan to show; semantics of a null name (e.g. show
   *               all plans) are decided by the executing task.
   * @param resFile file the results are written to.
   */
  public ShowResourcePlanDesc(String rpName, Path resFile) {
    this.resourcePlanName = rpName;
    this.resFile = resFile.toString();
  }

  @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
  public String getResFile() {
    return resFile;
  }

  public void setResFile(String resFile) {
    this.resFile = resFile;
  }

  @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
  public String getResourcePlanName() {
    return resourcePlanName;
  }

  public String getTable() {
    return TABLE;
  }

  public String getSchema() {
    return SCHEMA;
  }
}
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
index a3ab8f0..b5411c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
@@ -132,6 +132,11 @@ public enum HiveOperationType {
SHOW_TRANSACTIONS,
ABORT_TRANSACTIONS,
KILL_QUERY,
+ CREATE_RESOURCEPLAN,
+ SHOW_RESOURCEPLAN,
+ ALTER_RESOURCEPLAN,
+ DROP_RESOURCEPLAN,
+
// ==== Hive command operation types starts here ==== //
SET,
RESET,
http://git-wip-us.apache.org/repos/asf/hive/blob/a7e34455/ql/src/test/queries/clientpositive/resourceplan.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/resourceplan.q b/ql/src/test/queries/clientpositive/resourceplan.q
new file mode 100644
index 0000000..586491d
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/resourceplan.q
@@ -0,0 +1,106 @@
+-- Continue past errors: several statements below are expected to fail and we check those error conditions.
+set hive.cli.errors.ignore=true;
+
+-- Disable CBO; Calcite otherwise hits an NPE on these statements.
+set hive.cbo.enable=false;
+
+-- Force DN to create db_privs tables.
+show grant user hive_test_user;
+
+-- Initialize the hive schema.
+source ../../metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql;
+
+--
+-- Actual tests.
+--
+
+-- Empty resource plans.
+SHOW RESOURCE PLANS;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- Create and show plan_1.
+CREATE RESOURCE PLAN plan_1;
+SHOW RESOURCE PLANS;
+SHOW RESOURCE PLAN plan_1;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- Create and show plan_2.
+CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM 10;
+SHOW RESOURCE PLANS;
+SHOW RESOURCE PLAN plan_2;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+--
+-- Rename resource plans.
+--
+
+-- Expected to fail: the name plan_2 is already taken.
+ALTER RESOURCE PLAN plan_1 RENAME TO plan_2;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- Success.
+ALTER RESOURCE PLAN plan_1 RENAME TO plan_3;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- Change query parallelism, success.
+ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 20;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+--
+-- Activate, enable, disable: exercise each status transition of a plan.
+--
+
+-- DISABLED -> ACTIVE fail.
+ALTER RESOURCE PLAN plan_3 ACTIVATE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- DISABLED -> DISABLED success.
+ALTER RESOURCE PLAN plan_3 DISABLE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- DISABLED -> ENABLED success.
+ALTER RESOURCE PLAN plan_3 ENABLE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- ENABLED -> ACTIVE success.
+ALTER RESOURCE PLAN plan_3 ACTIVATE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- ACTIVE -> ACTIVE success.
+ALTER RESOURCE PLAN plan_3 ACTIVATE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- ACTIVE -> ENABLED fail.
+ALTER RESOURCE PLAN plan_3 ENABLE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- ACTIVE -> DISABLED fail.
+ALTER RESOURCE PLAN plan_3 DISABLE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- DISABLED -> ENABLED success.
+ALTER RESOURCE PLAN plan_2 ENABLE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- plan_2: ENABLED -> ACTIVE and plan_3: ACTIVE -> ENABLED, success.
+ALTER RESOURCE PLAN plan_2 ACTIVATE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- ENABLED -> ENABLED success.
+ALTER RESOURCE PLAN plan_3 ENABLE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+-- ENABLED -> DISABLED success.
+ALTER RESOURCE PLAN plan_3 DISABLE;
+SELECT * FROM SYS.WM_RESOURCEPLANS;
+
+--
+-- Drop resource plan.
+--
+
+-- Expected to fail: plan_2 is still the active plan.
+DROP RESOURCE PLAN plan_2;
+
+-- Success.
+DROP RESOURCE PLAN plan_3;
+SELECT * FROM SYS.WM_RESOURCEPLANS;