Posted to dev@submarine.apache.org by li...@apache.org on 2020/03/09 13:12:08 UTC

[submarine] branch master updated: SUBMARINE-413. Custom show databases command to allow exposing filtered databases

This is an automated email from the ASF dual-hosted git repository.

liuxun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/submarine.git


The following commit(s) were added to refs/heads/master by this push:
     new f21a277  SUBMARINE-413. Custom show databases command to allow exposing filtered databases
f21a277 is described below

commit f21a27705e594e37f25abd5b3df91256b16acc8b
Author: Kent Yao <ya...@hotmail.com>
AuthorDate: Mon Mar 9 20:02:01 2020 +0800

    SUBMARINE-413. Custom show databases command to allow exposing filtered databases
    
    ### What is this PR for?
    
    Add a new command that replaces Spark's own ShowDatabasesCommand so that databases the session user is not allowed to see are filtered out of the result.
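
    The interception is a small pattern match in the optimizer rule (condensed from the diff below; authorizeRest is a made-up name standing in for the rule's existing privilege checks):

    ```scala
    // Swap Spark's own command for the filtering wrapper; if the plan is
    // already wrapped, pass it through unchanged to avoid re-wrapping.
    override def apply(plan: LogicalPlan): LogicalPlan = plan match {
      case s: ShowDatabasesCommand          => SubmarineShowDatabasesCommand(s)
      case s: SubmarineShowDatabasesCommand => s
      case _                                => authorizeRest(plan)
    }
    ```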
    
    ### What type of PR is it?
    Improvement
    
    ### Todos
    
    * [ ] - Show tables with filtered objects
    * [ ] - Row-level filtering
    * [ ] - Data masking
    * [ ] - Configuration Restriction
    
    ### What is the Jira issue?
    * SUBMARINE-413: https://issues.apache.org/jira/browse/SUBMARINE/SUBMARINE-413
    
    ### How should this be tested?
    
    Unit tests added.
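
    For example, the module's tests can be run locally with one of the CI profile combinations from .travis.yml (here Spark 2.4 with Ranger 2.0):

    ```bash
    mvn --no-transfer-progress clean install -Dmaven.javadoc.skip=true \
        -pl :submarine-spark-security -Pspark-2.4 -Pranger-2.0
    ```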
    
    ### Screenshots (if appropriate)
    
    ### Questions:
    * Do the license files need an update? No
    * Are there breaking changes for older versions? No
    * Does this need documentation? Yes, it will be added once the items above are finished
    
    Author: Kent Yao <ya...@hotmail.com>
    
    Closes #212 from yaooqinn/SUBMARINE-413 and squashes the following commits:
    
    af10bb2 [Kent Yao] refine artifactId
    900b264 [Kent Yao] SUBMARINE-413. Custom show databases command to allow exposing filtered databases
---
 .travis.yml                                        | 18 +++++------
 submarine-security/spark-security/pom.xml          |  2 +-
 .../optimizer/RangerSparkAuthorizerExtension.scala |  3 ++
 .../execution/SubmarineShowDatabasesCommand.scala  | 36 ++++++++++++++++++++++
 .../spark/security/SparkRangerAuthorizerTest.scala | 23 ++++++++++++++
 5 files changed, 72 insertions(+), 10 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index b047980..a811f7a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -57,7 +57,7 @@ env:
     - EXCLUDE_CLOUD="!${SUBMARINE}:submarine-cloud"
     - EXCLUDE_DIST="!${SUBMARINE}:submarine-dist"
     - EXCLUDE_TEST="!${SUBMARINE}:submarine-test,!${SUBMARINE}:submarine-test-e2e,!${SUBMARINE}:submarine-test-k8s"
-    - EXCLUDE_SPARK_SECURTITY="!${SUBMARINE}:spark-security"
+    - EXCLUDE_SPARK_SECURTITY="!${SUBMARINE}:submarine-spark-security"
     - MOZ_HEADLESS=1
 
 before_install:
@@ -262,7 +262,7 @@ matrix:
         - BUILD_FLAG="--no-transfer-progress clean install -Dmaven.javadoc.skip=true"
         - TEST_FLAG=$BUILD_FLAG
         - PROFILE="-Pspark-2.3 -Pranger-1.0"
-        - MODULES="-pl :spark-security"
+        - MODULES="-pl :submarine-spark-security"
 
     - name: Test submarine spark security with spark 2.3 and ranger 1.1
       language: scala
@@ -272,7 +272,7 @@ matrix:
         - BUILD_FLAG="--no-transfer-progress clean install -Dmaven.javadoc.skip=true"
         - TEST_FLAG=$BUILD_FLAG
         - PROFILE="-Pspark-2.3 -Pranger-1.1"
-        - MODULES="-pl :spark-security"
+        - MODULES="-pl :submarine-spark-security"
 
     - name: Test submarine spark security with spark 2.3 and ranger 1.2
       language: scala
@@ -282,7 +282,7 @@ matrix:
         - BUILD_FLAG="--no-transfer-progress clean install -Dmaven.javadoc.skip=true"
         - TEST_FLAG=$BUILD_FLAG
         - PROFILE="-Pspark-2.3 -Pranger-1.2"
-        - MODULES="-pl :spark-security"
+        - MODULES="-pl :submarine-spark-security"
 
     - name: Test submarine spark security with spark 2.3 and ranger 2.0
       language: scala
@@ -292,7 +292,7 @@ matrix:
         - BUILD_FLAG="--no-transfer-progress clean install -Dmaven.javadoc.skip=true"
         - TEST_FLAG=$BUILD_FLAG
         - PROFILE="-Pspark-2.3 -Pranger-2.0"
-        - MODULES="-pl :spark-security"
+        - MODULES="-pl :submarine-spark-security"
 
     - name: Test submarine spark security with spark 2.4 and ranger 1.0
       language: scala
@@ -302,7 +302,7 @@ matrix:
         - BUILD_FLAG="--no-transfer-progress clean install -Dmaven.javadoc.skip=true"
         - TEST_FLAG=$BUILD_FLAG
         - PROFILE="-Pspark-2.4 -Pranger-1.0"
-        - MODULES="-pl :spark-security"
+        - MODULES="-pl :submarine-spark-security"
 
     - name: Test submarine spark security with spark 2.4 and ranger 1.1
       language: scala
@@ -312,7 +312,7 @@ matrix:
         - BUILD_FLAG="--no-transfer-progress clean install -Dmaven.javadoc.skip=true"
         - TEST_FLAG=$BUILD_FLAG
         - PROFILE="-Pspark-2.4 -Pranger-1.1"
-        - MODULES="-pl :spark-security"
+        - MODULES="-pl :submarine-spark-security"
 
     - name: Test submarine spark security with spark 2.4 and ranger 1.2
       language: scala
@@ -322,7 +322,7 @@ matrix:
         - BUILD_FLAG="--no-transfer-progress clean install -Dmaven.javadoc.skip=true"
         - TEST_FLAG=$BUILD_FLAG
         - PROFILE="-Pspark-2.4 -Pranger-1.2"
-        - MODULES="-pl :spark-security"
+        - MODULES="-pl :submarine-spark-security"
 
     - name: Test submarine spark security with spark 2.4 and ranger 2.0
       language: scala
@@ -332,7 +332,7 @@ matrix:
         - BUILD_FLAG="--no-transfer-progress clean install -Dmaven.javadoc.skip=true"
         - TEST_FLAG=$BUILD_FLAG
         - PROFILE="-Pspark-2.4 -Pranger-2.0"
-        - MODULES="-pl :spark-security"
+        - MODULES="-pl :submarine-spark-security"
 install:
   - mvn --version
   - echo ">>> mvn $BUILD_FLAG $MODULES $PROFILE -B"
diff --git a/submarine-security/spark-security/pom.xml b/submarine-security/spark-security/pom.xml
index 6169a75..4cc7a01 100644
--- a/submarine-security/spark-security/pom.xml
+++ b/submarine-security/spark-security/pom.xml
@@ -31,7 +31,7 @@
   <packaging>jar</packaging>
 
   <name>Submarine: Spark Security</name>
-  <artifactId>spark-security</artifactId>
+  <artifactId>submarine-spark-security</artifactId>
 
   <properties>
     <eclipse.jpa.version>2.5.2</eclipse.jpa.version>
diff --git a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RangerSparkAuthorizerExtension.scala b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RangerSparkAuthorizerExtension.scala
index dcbb954..8fa110c 100644
--- a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RangerSparkAuthorizerExtension.scala
+++ b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RangerSparkAuthorizerExtension.scala
@@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan}
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.execution.command.{AlterDatabasePropertiesCommand, AlterTableAddPartitionCommand, AlterTableDropPartitionCommand, AlterTableRecoverPartitionsCommand, AlterTableRenameCommand, AlterTableRenamePartitionCommand, AlterTableSerDePropertiesCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, AlterViewAsCommand, AnalyzeColumnCommand, AnalyzeTableCommand, CacheTableCommand, CreateDatabaseCommand, CreateDataSourceTabl [...]
 import org.apache.spark.sql.execution.datasources.{CreateTempViewUsing, InsertIntoDataSourceCommand, InsertIntoHadoopFsRelationCommand}
+import org.apache.spark.sql.execution.SubmarineShowDatabasesCommand
 import org.apache.submarine.spark.security.{RangerSparkAuthorizer, SparkAccessControlException}
 
 /**
@@ -48,6 +49,8 @@ case class RangerSparkAuthorizerExtension(spark: SparkSession) extends Rule[Logi
    */
   override def apply(plan: LogicalPlan): LogicalPlan = {
     plan match {
+      case s: ShowDatabasesCommand => SubmarineShowDatabasesCommand(s)
+      case s: SubmarineShowDatabasesCommand => s
       case _ =>
         val operationType: SparkOperationType = toOperationType(plan)
         val (in, out) = PrivilegesBuilder.build(plan)
diff --git a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/execution/SubmarineShowDatabasesCommand.scala b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/execution/SubmarineShowDatabasesCommand.scala
new file mode 100644
index 0000000..fd3f7c3
--- /dev/null
+++ b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/execution/SubmarineShowDatabasesCommand.scala
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution
+
+import org.apache.spark.sql.{Row, SparkSession}
+import org.apache.spark.sql.execution.command.{RunnableCommand, ShowDatabasesCommand}
+import org.apache.submarine.spark.security.{RangerSparkAuthorizer, SparkPrivilegeObject, SparkPrivilegeObjectType}
+
+case class SubmarineShowDatabasesCommand(child: ShowDatabasesCommand) extends RunnableCommand {
+  override val output = child.output
+
+  override def run(sparkSession: SparkSession): Seq[Row] = {
+    val rows = child.run(sparkSession)
+    rows.filter(r => RangerSparkAuthorizer.isAllowed(toSparkPrivilegeObject(r)))
+  }
+
+  private def toSparkPrivilegeObject(row: Row): SparkPrivilegeObject = {
+    val database = row.getString(0)
+    new SparkPrivilegeObject(SparkPrivilegeObjectType.DATABASE, database, database)
+  }
+}
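
For context, here is a sketch of how a rule like RangerSparkAuthorizerExtension could be registered in a session through Spark's SparkSessionExtensions API (hypothetical wiring: the entry-point class name below is invented, and the actual registration code is not part of this diff):

```scala
import org.apache.spark.sql.SparkSessionExtensions
import org.apache.spark.sql.catalyst.optimizer.RangerSparkAuthorizerExtension

// Hypothetical entry point: inject the authorizer rule so that every
// SHOW DATABASES plan is rewritten into SubmarineShowDatabasesCommand.
class SubmarineSparkExtensions extends (SparkSessionExtensions => Unit) {
  override def apply(extensions: SparkSessionExtensions): Unit = {
    extensions.injectOptimizerRule(RangerSparkAuthorizerExtension)
  }
}
```

A session started with --conf spark.sql.extensions set to the fully qualified name of such a class would then pick the rule up automatically.
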
diff --git a/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/SparkRangerAuthorizerTest.scala b/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/SparkRangerAuthorizerTest.scala
index 049dcdb..06fc247 100644
--- a/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/SparkRangerAuthorizerTest.scala
+++ b/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/SparkRangerAuthorizerTest.scala
@@ -65,9 +65,32 @@ class SparkRangerAuthorizerTest extends FunSuite with BeforeAndAfterAll {
       """
         |CREATE DATABASE testdb
         |""".stripMargin)
+    // before authorization enabled
+    withUser("alice") {
+      assert(sql("show databases").count() === 2)
+    }
+    withUser("bob") {
+      assert(sql("show databases").count() === 2)
+    }
+    withUser("kent") {
+      assert(sql("show databases").count() === 2)
+    }
     enableAuthorizer(spark)
   }
 
+  test("show databases") {
+    withUser("alice") {
+      assert(sql("show databases").count() === 0)
+    }
+    withUser("bob") {
+      assert(sql("show databases").count() === 1)
+      assert(sql("show databases").head().getString(0) === "default")
+    }
+    withUser("kent") {
+      assert(sql("show databases").count() === 1)
+    }
+  }
+
   test("use database") {
     withUser("alice") {
       val e = intercept[SparkAccessControlException](sql("use default"))

