Posted to commits@hive.apache.org by ha...@apache.org on 2014/05/25 21:27:48 UTC
svn commit: r1597463 - in /hive/trunk/ql/src:
java/org/apache/hadoop/hive/ql/ java/org/apache/hadoop/hive/ql/exec/
java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/
java/org/apache/hadoop/hive/ql/security/authorization/ test/qu...
Author: hashutosh
Date: Sun May 25 19:27:47 2014
New Revision: 1597463
URL: http://svn.apache.org/r1597463
Log:
HIVE-5961 : Add explain authorize for checking privileges (Navis via Ashutosh Chauhan)
Added:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java
hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q
hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
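
In short, HIVE-5961 adds an AUTHORIZATION option to EXPLAIN: rather than aborting
on the first missing privilege, the statement reports the query's inputs, outputs,
current user, operation, and any authorization failures. A minimal sketch of
driving the new statement programmatically (illustrative, not part of the patch;
it assumes a configured HiveConf, a started session, and the src/srcpart tables
used by the test below):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class ExplainAuthorizationSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        conf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
        SessionState.start(conf);          // Driver requires an active session
        Driver driver = new Driver(conf);
        // The EXPLAIN itself is exempt from the up-front check (skipAuthorization),
        // but its output lists every privilege failure the query would hit.
        driver.run("explain authorization select * from src join srcpart");
      }
    }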
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Sun May 25 19:27:47 2014
@@ -101,6 +101,7 @@ import org.apache.hadoop.hive.ql.plan.Ta
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
@@ -456,8 +457,9 @@ public class Driver implements CommandPr
schema = getSchema(sem, conf);
//do the authorization check
- if (HiveConf.getBoolVar(conf,
- HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+ if (!sem.skipAuthorization() &&
+ HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+
try {
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
doAuthorization(sem);
@@ -472,8 +474,6 @@ public class Driver implements CommandPr
}
}
- //restore state after we're done executing a specific query
-
return 0;
} catch (Exception e) {
ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
@@ -500,34 +500,34 @@ public class Driver implements CommandPr
}
}
- private void doAuthorization(BaseSemanticAnalyzer sem)
- throws HiveException, AuthorizationException {
+ public static void doAuthorization(BaseSemanticAnalyzer sem)
+ throws HiveException, AuthorizationException {
HashSet<ReadEntity> inputs = sem.getInputs();
HashSet<WriteEntity> outputs = sem.getOutputs();
SessionState ss = SessionState.get();
- HiveOperation op = ss.getHiveOperation();
+ HiveOperation op = sem.getHiveOperation();
Hive db = sem.getDb();
if (ss.isAuthorizationModeV2()) {
doAuthorizationV2(ss, op, inputs, outputs);
return;
}
-
if (op == null) {
throw new HiveException("Operation should not be null");
}
+ HiveAuthorizationProvider authorizer = ss.getAuthorizer();
if (op.equals(HiveOperation.CREATEDATABASE)) {
- ss.getAuthorizer().authorize(
+ authorizer.authorize(
op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
} else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
|| op.equals(HiveOperation.CREATETABLE)) {
- ss.getAuthorizer().authorize(
+ authorizer.authorize(
db.getDatabase(SessionState.get().getCurrentDatabase()), null,
HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
} else {
if (op.equals(HiveOperation.IMPORT)) {
ImportSemanticAnalyzer isa = (ImportSemanticAnalyzer) sem;
if (!isa.existsTable()) {
- ss.getAuthorizer().authorize(
+ authorizer.authorize(
db.getDatabase(SessionState.get().getCurrentDatabase()), null,
HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
}
@@ -539,7 +539,7 @@ public class Driver implements CommandPr
continue;
}
if (write.getType() == Entity.Type.DATABASE) {
- ss.getAuthorizer().authorize(write.getDatabase(),
+ authorizer.authorize(write.getDatabase(),
null, op.getOutputRequiredPrivileges());
continue;
}
@@ -548,14 +548,14 @@ public class Driver implements CommandPr
Partition part = db.getPartition(write.getTable(), write
.getPartition().getSpec(), false);
if (part != null) {
- ss.getAuthorizer().authorize(write.getPartition(), null,
+ authorizer.authorize(write.getPartition(), null,
op.getOutputRequiredPrivileges());
continue;
}
}
if (write.getTable() != null) {
- ss.getAuthorizer().authorize(write.getTable(), null,
+ authorizer.authorize(write.getTable(), null,
op.getOutputRequiredPrivileges());
}
}
@@ -646,7 +646,7 @@ public class Driver implements CommandPr
continue;
}
if (read.getType() == Entity.Type.DATABASE) {
- ss.getAuthorizer().authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
+ authorizer.authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
continue;
}
Table tbl = read.getTable();
@@ -657,11 +657,11 @@ public class Driver implements CommandPr
if (tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
List<String> cols = part2Cols.get(partition);
if (cols != null && cols.size() > 0) {
- ss.getAuthorizer().authorize(partition.getTable(),
+ authorizer.authorize(partition.getTable(),
partition, cols, op.getInputRequiredPrivileges(),
null);
} else {
- ss.getAuthorizer().authorize(partition,
+ authorizer.authorize(partition,
op.getInputRequiredPrivileges(), null);
}
continue;
@@ -675,10 +675,10 @@ public class Driver implements CommandPr
!(tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE)) {
List<String> cols = tab2Cols.get(tbl);
if (cols != null && cols.size() > 0) {
- ss.getAuthorizer().authorize(tbl, null, cols,
+ authorizer.authorize(tbl, null, cols,
op.getInputRequiredPrivileges(), null);
} else {
- ss.getAuthorizer().authorize(tbl, op.getInputRequiredPrivileges(),
+ authorizer.authorize(tbl, op.getInputRequiredPrivileges(),
null);
}
tableAuthChecked.add(tbl.getTableName());
@@ -688,7 +688,7 @@ public class Driver implements CommandPr
}
}
- private void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
+ private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
HashSet<WriteEntity> outputs) throws HiveException {
HiveOperationType hiveOpType = getHiveOperationType(op);
List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs);
@@ -697,7 +697,7 @@ public class Driver implements CommandPr
return;
}
- private List<HivePrivilegeObject> getHivePrivObjects(HashSet<? extends Entity> privObjects) {
+ private static List<HivePrivilegeObject> getHivePrivObjects(HashSet<? extends Entity> privObjects) {
List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
if(privObjects == null){
return hivePrivobjs;
@@ -748,9 +748,7 @@ public class Driver implements CommandPr
return hivePrivobjs;
}
-
-
- private HiveOperationType getHiveOperationType(HiveOperation op) {
+ private static HiveOperationType getHiveOperationType(HiveOperation op) {
return HiveOperationType.valueOf(op.name());
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Sun May 25 19:27:47 2014
@@ -28,6 +28,7 @@ import java.lang.reflect.InvocationTarge
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
@@ -40,15 +41,22 @@ import java.util.TreeMap;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.plan.Explain;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.hive.ql.plan.TezWork;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.api.StageType;
+import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
+import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.StringUtils;
import org.json.JSONArray;
@@ -209,6 +217,41 @@ public class ExplainTask extends Task<Ex
return jsonOutput ? outJSONObject : null;
}
+ private List<String> toString(Collection<?> objects) {
+ List<String> list = new ArrayList<String>();
+ for (Object object : objects) {
+ list.add(String.valueOf(object));
+ }
+ return list;
+ }
+
+ private Object toJson(String header, String message, PrintStream out, ExplainWork work)
+ throws Exception {
+ if (work.isFormatted()) {
+ return message;
+ }
+ out.print(header);
+ out.println(": ");
+ out.print(indentString(2));
+ out.println(message);
+ return null;
+ }
+
+ private Object toJson(String header, List<String> messages, PrintStream out, ExplainWork work)
+ throws Exception {
+ if (work.isFormatted()) {
+ return new JSONArray(messages);
+ }
+ out.print(header);
+ out.println(": ");
+ for (String message : messages) {
+ out.print(indentString(2));
+ out.print(message);
+ out.println();
+ }
+ return null;
+ }
+
@Override
public int execute(DriverContext driverContext) {
@@ -223,6 +266,14 @@ public class ExplainTask extends Task<Ex
if (work.isFormatted()) {
out.print(jsonLogicalPlan);
}
+ } else if (work.isAuthorize()) {
+ JSONObject jsonAuth = collectAuthRelatedEntities(out, work);
+ if (work.isFormatted()) {
+ out.print(jsonAuth);
+ }
+ } else if (work.getDependency()) {
+ JSONObject jsonDependencies = getJSONDependencies(work);
+ out.print(jsonDependencies);
} else {
if (work.getDependency()) {
JSONObject jsonDependencies = getJSONDependencies(work);
@@ -249,7 +300,60 @@ public class ExplainTask extends Task<Ex
}
}
- private String indentString(int indent) {
+ private JSONObject collectAuthRelatedEntities(PrintStream out, ExplainWork work)
+ throws Exception {
+
+ BaseSemanticAnalyzer analyzer = work.getAnalyzer();
+ HiveOperation operation = analyzer.getHiveOperation();
+
+ JSONObject object = new JSONObject();
+ Object jsonInput = toJson("INPUTS", toString(analyzer.getInputs()), out, work);
+ if (work.isFormatted()) {
+ object.put("INPUTS", jsonInput);
+ }
+ Object jsonOutput = toJson("OUTPUTS", toString(analyzer.getOutputs()), out, work);
+ if (work.isFormatted()) {
+ object.put("OUTPUTS", jsonOutput);
+ }
+ String userName = SessionState.get().getAuthenticator().getUserName();
+ Object jsonUser = toJson("CURRENT_USER", userName, out, work);
+ if (work.isFormatted()) {
+ object.put("CURRENT_USER", jsonUser);
+ }
+ Object jsonOperation = toJson("OPERATION", operation.name(), out, work);
+ if (work.isFormatted()) {
+ object.put("OPERATION", jsonOperation);
+ }
+ if (analyzer.skipAuthorization()) {
+ return object;
+ }
+ HiveAuthorizationProvider delegate = SessionState.get().getAuthorizer();
+
+ final List<String> exceptions = new ArrayList<String>();
+ HiveAuthorizationProvider authorizer = AuthorizationFactory.create(delegate,
+ new AuthorizationFactory.AuthorizationExceptionHandler() {
+ public void exception(AuthorizationException exception) {
+ exceptions.add(exception.getMessage());
+ }
+ });
+
+ SessionState.get().setAuthorizer(authorizer);
+ try {
+ Driver.doAuthorization(analyzer);
+ } finally {
+ SessionState.get().setAuthorizer(delegate);
+ }
+
+ if (!exceptions.isEmpty()) {
+ Object jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work);
+ if (work.isFormatted()) {
+ object.put("AUTHORIZATION_FAILURES", jsonFails);
+ }
+ }
+ return object;
+ }
+
+ private static String indentString(int indent) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < indent; ++i) {
sb.append(" ");
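
The two toJson() overloads added above share one dual-mode contract: when the
FORMATTED option is set they return a JSON fragment for the caller to put into
the result object; otherwise they print an indented plain-text section and
return null. A standalone sketch of that pattern (class and method names are
illustrative, not from the patch):

    import java.io.PrintStream;
    import java.util.List;
    import org.json.JSONArray;

    class DualModeSectionSketch {
      static Object section(String header, List<String> messages,
                            PrintStream out, boolean formatted) {
        if (formatted) {
          return new JSONArray(messages); // caller collects it into a JSONObject
        }
        out.println(header + ": ");
        for (String message : messages) {
          out.println("  " + message);    // two spaces, as in indentString(2)
        }
        return null;                      // already printed; nothing to collect
      }
    }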
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Sun May 25 19:27:47 2014
@@ -21,8 +21,6 @@ package org.apache.hadoop.hive.ql.parse;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.sql.Date;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -49,7 +47,6 @@ import org.apache.hadoop.hive.ql.QueryPr
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.LineageInfo;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -70,6 +67,7 @@ import org.apache.hadoop.hive.ql.metadat
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.session.SessionState;
@@ -107,6 +105,8 @@ public abstract class BaseSemanticAnalyz
public static int HIVE_COLUMN_ORDER_ASC = 1;
public static int HIVE_COLUMN_ORDER_DESC = 0;
+ protected HiveOperation hiveOperation;
+
/**
* ReadEntitites that are passed to the hooks.
*/
@@ -145,6 +145,18 @@ public abstract class BaseSemanticAnalyz
protected static final String PARQUETFILE_OUTPUT = MapredParquetOutputFormat.class.getName();
protected static final String PARQUETFILE_SERDE = ParquetHiveSerDe.class.getName();
+ public HiveOperation getHiveOperation() {
+ return hiveOperation;
+ }
+
+ public void setHiveOperation(HiveOperation hiveOperation) {
+ this.hiveOperation = hiveOperation;
+ }
+
+ public boolean skipAuthorization() {
+ return false;
+ }
+
class RowFormatParams {
String fieldDelim = null;
String fieldEscape = null;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Sun May 25 19:27:47 2014
@@ -25,6 +25,7 @@ import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
@@ -48,12 +49,20 @@ public class ExplainSemanticAnalyzer ext
boolean formatted = false;
boolean dependency = false;
boolean logical = false;
- if (ast.getChildCount() == 2) {
- int explainOptions = ast.getChild(1).getType();
- formatted = (explainOptions == HiveParser.KW_FORMATTED);
- extended = (explainOptions == HiveParser.KW_EXTENDED);
- dependency = (explainOptions == HiveParser.KW_DEPENDENCY);
- logical = (explainOptions == HiveParser.KW_LOGICAL);
+ boolean authorize = false;
+ for (int i = 1; i < ast.getChildCount(); i++) {
+ int explainOptions = ast.getChild(i).getType();
+ if (explainOptions == HiveParser.KW_FORMATTED) {
+ formatted = true;
+ } else if (explainOptions == HiveParser.KW_EXTENDED) {
+ extended = true;
+ } else if (explainOptions == HiveParser.KW_DEPENDENCY) {
+ dependency = true;
+ } else if (explainOptions == HiveParser.KW_LOGICAL) {
+ logical = true;
+ } else if (explainOptions == HiveParser.KW_AUTHORIZATION) {
+ authorize = true;
+ }
}
ctx.setExplain(true);
@@ -87,11 +96,12 @@ public class ExplainSemanticAnalyzer ext
tasks,
fetchTask,
input.dump(),
- sem.getInputs(),
+ sem,
extended,
formatted,
dependency,
- logical);
+ logical,
+ authorize);
work.setAppendTaskType(
HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES));
@@ -106,4 +116,12 @@ public class ExplainSemanticAnalyzer ext
public List<FieldSchema> getResultSchema() {
return fieldList;
}
+
+ @Override
+ public boolean skipAuthorization() {
+ List<Task<? extends Serializable>> rootTasks = getRootTasks();
+ assert rootTasks != null && rootTasks.size() == 1;
+ Task task = rootTasks.get(0);
+ return task instanceof ExplainTask && ((ExplainTask)task).getWork().isAuthorize();
+ }
}
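
skipAuthorization() is the hook that keeps EXPLAIN AUTHORIZATION itself from being
rejected when hive.security.authorization.enabled=true: the Driver skips its
up-front enforcement and lets ExplainTask perform and report the checks instead.
Condensed from the Driver hunk above:

    // Enforcement runs only when the analyzer does not opt out, i.e. for
    // everything except an EXPLAIN AUTHORIZATION statement.
    if (!sem.skipAuthorization() &&
        HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
      Driver.doAuthorization(sem);   // throws AuthorizationException on failure
    }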
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Sun May 25 19:27:47 2014
@@ -295,6 +295,7 @@ KW_COMPACT: 'COMPACT';
KW_COMPACTIONS: 'COMPACTIONS';
KW_TRANSACTIONS: 'TRANSACTIONS';
KW_REWRITE : 'REWRITE';
+KW_AUTHORIZATION: 'AUTHORIZATION';
// Operators
// NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Sun May 25 19:27:47 2014
@@ -619,13 +619,18 @@ statement
explainStatement
@init { pushMsg("explain statement", state); }
@after { popMsg(state); }
- : KW_EXPLAIN
- ( (explainOptions=KW_EXTENDED|explainOptions=KW_FORMATTED|explainOptions=KW_DEPENDENCY|explainOptions=KW_LOGICAL)? execStatement
- -> ^(TOK_EXPLAIN execStatement $explainOptions?) |
- KW_REWRITE queryStatementExpression[true] -> ^(TOK_EXPLAIN_SQ_REWRITE queryStatementExpression)
- )
+ : KW_EXPLAIN (
+ explainOption* execStatement -> ^(TOK_EXPLAIN execStatement explainOption*)
+ |
+ KW_REWRITE queryStatementExpression[true] -> ^(TOK_EXPLAIN_SQ_REWRITE queryStatementExpression))
;
+explainOption
+@init { msgs.push("explain option"); }
+@after { msgs.pop(); }
+ : KW_EXTENDED|KW_FORMATTED|KW_DEPENDENCY|KW_LOGICAL|KW_AUTHORIZATION
+ ;
+
execStatement
@init { pushMsg("statement", state); }
@after { popMsg(state); }
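
Because explainStatement now consumes explainOption*, EXPLAIN options may be
combined and given in any order; the new test below exercises exactly that with
"explain authorization ..." and "explain formatted authorization ..." on the
same statement.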
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Sun May 25 19:27:47 2014
@@ -538,5 +538,5 @@ functionIdentifier
nonReserved
:
- KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION |
- KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_PARQUETFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE |
- KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE
+ KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION |
+ KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_PARQUETFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE |
+ KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_ROLES | KW_INNER | KW_DEFINED | KW_ADMIN | KW_JAR | KW_FILE | KW_OWNER | KW_PRINCIPALS | KW_ALL | KW_DEFAULT | KW_NONE | KW_COMPACT | KW_COMPACTIONS | KW_TRANSACTIONS | KW_REWRITE | KW_AUTHORIZATION
;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Sun May 25 19:27:47 2014
@@ -147,10 +147,33 @@ public final class SemanticAnalyzerFacto
public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
throws SemanticException {
+ BaseSemanticAnalyzer analyzer = getAnalyzer(conf, tree);
+
+ HiveOperation operation;
+ if (tree.getType() == HiveParser.TOK_ALTERTABLE_PARTITION) {
+ Integer type = tree.getChild(1).getType();
+ if (tree.getChild(0).getChildCount() > 1) {
+ operation = tablePartitionCommandType.get(type)[1];
+ } else {
+ operation = tablePartitionCommandType.get(type)[0];
+ }
+ } else {
+ operation = commandType.get(tree.getType());
+ }
+ analyzer.setHiveOperation(operation);
+
+ if (SessionState.get() != null) {
+ SessionState.get().setCommandType(operation);
+ }
+
+ return analyzer;
+ }
+
+ private static BaseSemanticAnalyzer getAnalyzer(HiveConf conf, ASTNode tree)
+ throws SemanticException {
if (tree.getToken() == null) {
throw new RuntimeException("Empty Syntax Tree");
} else {
- setSessionCommandType(commandType.get(tree.getToken().getType()));
switch (tree.getToken().getType()) {
case HiveParser.TOK_EXPLAIN:
@@ -232,14 +255,6 @@ public final class SemanticAnalyzerFacto
return new DDLSemanticAnalyzer(conf);
case HiveParser.TOK_ALTERTABLE_PARTITION:
- HiveOperation commandType = null;
- Integer type = ((ASTNode) tree.getChild(1)).getToken().getType();
- if (tree.getChild(0).getChildCount() > 1) {
- commandType = tablePartitionCommandType.get(type)[1];
- } else {
- commandType = tablePartitionCommandType.get(type)[0];
- }
- setSessionCommandType(commandType);
return new DDLSemanticAnalyzer(conf);
case HiveParser.TOK_CREATEFUNCTION:
@@ -258,12 +273,6 @@ public final class SemanticAnalyzerFacto
}
}
- private static void setSessionCommandType(HiveOperation commandType) {
- if (SessionState.get() != null) {
- SessionState.get().setCommandType(commandType);
- }
- }
-
private SemanticAnalyzerFactory() {
// prevent instantiation
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java?rev=1597463&r1=1597462&r2=1597463&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java Sun May 25 19:27:47 2014
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
/**
* ExplainWork.
@@ -49,6 +50,9 @@ public class ExplainWork implements Seri
boolean appendTaskType;
+ boolean authorize;
+
+ private transient BaseSemanticAnalyzer analyzer;
public ExplainWork() {
}
@@ -58,21 +62,24 @@ public class ExplainWork implements Seri
List<Task<? extends Serializable>> rootTasks,
Task<? extends Serializable> fetchTask,
String astStringTree,
- HashSet<ReadEntity> inputs,
+ BaseSemanticAnalyzer analyzer,
boolean extended,
boolean formatted,
boolean dependency,
- boolean logical) {
+ boolean logical,
+ boolean authorize) {
this.resFile = resFile;
this.rootTasks = new ArrayList<Task<? extends Serializable>>(rootTasks);
this.fetchTask = fetchTask;
this.astStringTree = astStringTree;
- this.inputs = inputs;
+ this.analyzer = analyzer;
+ this.inputs = analyzer.getInputs();
this.extended = extended;
this.formatted = formatted;
this.dependency = dependency;
this.logical = logical;
this.pCtx = pCtx;
+ this.authorize = authorize;
}
public Path getResFile() {
@@ -162,4 +169,16 @@ public class ExplainWork implements Seri
public void setAppendTaskType(boolean appendTaskType) {
this.appendTaskType = appendTaskType;
}
+
+ public boolean isAuthorize() {
+ return authorize;
+ }
+
+ public void setAuthorize(boolean authorize) {
+ this.authorize = authorize;
+ }
+
+ public BaseSemanticAnalyzer getAnalyzer() {
+ return analyzer;
+ }
}
Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java?rev=1597463&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationFactory.java Sun May 25 19:27:47 2014
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+
+public class AuthorizationFactory {
+
+ public static HiveAuthorizationProvider create(HiveAuthorizationProvider delegated) {
+ return create(delegated, new DefaultAuthorizationExceptionHandler());
+ }
+
+ public static HiveAuthorizationProvider create(final HiveAuthorizationProvider delegated,
+ final AuthorizationExceptionHandler handler) {
+
+ InvocationHandler invocation = new InvocationHandler() {
+ public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
+ invokeAuth(method, args);
+ return null;
+ }
+
+ private void invokeAuth(Method method, Object[] args) throws Throwable {
+ try {
+ method.invoke(delegated, args);
+ } catch (InvocationTargetException e) {
+ if (e.getTargetException() instanceof AuthorizationException) {
+ handler.exception((AuthorizationException) e.getTargetException());
+ }
+ }
+ }
+ };
+
+ return (HiveAuthorizationProvider)Proxy.newProxyInstance(
+ AuthorizationFactory.class.getClassLoader(),
+ new Class[] {HiveAuthorizationProvider.class},
+ invocation);
+ }
+
+ public static interface AuthorizationExceptionHandler {
+ void exception(AuthorizationException exception) throws AuthorizationException;
+ }
+
+ public static class DefaultAuthorizationExceptionHandler
+ implements AuthorizationExceptionHandler {
+ public void exception(AuthorizationException exception) {
+ throw exception;
+ }
+ }
+}
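
The two-argument create() overload is what makes a dry run possible: a dynamic
proxy forwards every HiveAuthorizationProvider call to the real provider and
routes any AuthorizationException into the supplied handler instead of up the
stack. A usage sketch mirroring collectAuthRelatedEntities() in ExplainTask
above (the wrapper method is illustrative; the delegate is assumed to come from
SessionState.get().getAuthorizer()):

    import java.util.List;

    import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
    import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
    import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;

    public class CollectingAuthorizerSketch {
      // Wraps a real provider so failed checks are recorded instead of thrown.
      public static HiveAuthorizationProvider collecting(
          HiveAuthorizationProvider delegate, final List<String> failures) {
        return AuthorizationFactory.create(delegate,
            new AuthorizationFactory.AuthorizationExceptionHandler() {
              public void exception(AuthorizationException e) {
                failures.add(e.getMessage()); // record and keep going
              }
            });
      }
    }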
Added: hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q?rev=1597463&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/authorization_explain.q Sun May 25 19:27:47 2014
@@ -0,0 +1,7 @@
+set hive.security.authorization.enabled=true;
+
+explain authorization select * from src join srcpart;
+explain formatted authorization select * from src join srcpart;
+
+explain authorization use default;
+explain formatted authorization use default;
Added: hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.out?rev=1597463&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/authorization_explain.q.out Sun May 25 19:27:47 2014
@@ -0,0 +1,42 @@
+Warning: Shuffle Join JOIN[4][tables = [src, srcpart]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+INPUTS:
+ default@srcpart
+ default@srcpart@ds=2008-04-08/hr=11
+ default@srcpart@ds=2008-04-08/hr=12
+ default@srcpart@ds=2008-04-09/hr=11
+ default@srcpart@ds=2008-04-09/hr=12
+ default@src
+OUTPUTS:
+#### A masked pattern was here ####
+CURRENT_USER:
+ hive_test_user
+OPERATION:
+ QUERY
+AUTHORIZATION_FAILURES:
+ No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}
+ No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
+Warning: Shuffle Join JOIN[4][tables = [src, srcpart]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain formatted authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain formatted authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+PREHOOK: query: explain authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain authorization use default
+POSTHOOK: type: SWITCHDATABASE
+INPUTS:
+OUTPUTS:
+CURRENT_USER:
+ hive_test_user
+OPERATION:
+ SWITCHDATABASE
+PREHOOK: query: explain formatted authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain formatted authorization use default
+POSTHOOK: type: SWITCHDATABASE
+{"OUTPUTS":[],"INPUTS":[],"OPERATION":"SWITCHDATABASE","CURRENT_USER":"hive_test_user"}