You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by gatorsmile <gi...@git.apache.org> on 2017/05/19 17:52:58 UTC

[GitHub] spark pull request #17665: [SPARK-16742] Mesos Kerberos Support

Github user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/17665#discussion_r117538205
  
    --- Diff: resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/security/ConfigurableCredentialManagerSuite.scala ---
    @@ -45,106 +42,13 @@ class ConfigurableCredentialManagerSuite extends SparkFunSuite with Matchers wit
         super.afterAll()
       }
     
    -  test("Correctly load default credential providers") {
    -    credentialManager = new ConfigurableCredentialManager(sparkConf, hadoopConf)
    +  test("Correctly load YARNHadoopFSCredentialProvider") {
    +    credentialManager = new YARNConfigurableCredentialManager(sparkConf, hadoopConf)
     
    -    credentialManager.getServiceCredentialProvider("hadoopfs") should not be (None)
    -    credentialManager.getServiceCredentialProvider("hbase") should not be (None)
    -    credentialManager.getServiceCredentialProvider("hive") should not be (None)
    +    assert(credentialManager
    +      .getServiceCredentialProvider("hadoopfs")
    +      .get
    +      .isInstanceOf[YARNHadoopFSCredentialProvider])
       }
     
    -  test("disable hive credential provider") {
    -    sparkConf.set("spark.yarn.security.credentials.hive.enabled", "false")
    -    credentialManager = new ConfigurableCredentialManager(sparkConf, hadoopConf)
    -
    -    credentialManager.getServiceCredentialProvider("hadoopfs") should not be (None)
    -    credentialManager.getServiceCredentialProvider("hbase") should not be (None)
    -    credentialManager.getServiceCredentialProvider("hive") should be (None)
    -  }
    -
    -  test("using deprecated configurations") {
    -    sparkConf.set("spark.yarn.security.tokens.hadoopfs.enabled", "false")
    -    sparkConf.set("spark.yarn.security.tokens.hive.enabled", "false")
    -    credentialManager = new ConfigurableCredentialManager(sparkConf, hadoopConf)
    -
    -    credentialManager.getServiceCredentialProvider("hadoopfs") should be (None)
    -    credentialManager.getServiceCredentialProvider("hive") should be (None)
    -    credentialManager.getServiceCredentialProvider("test") should not be (None)
    -    credentialManager.getServiceCredentialProvider("hbase") should not be (None)
    -  }
    -
    -  test("verify obtaining credentials from provider") {
    -    credentialManager = new ConfigurableCredentialManager(sparkConf, hadoopConf)
    -    val creds = new Credentials()
    -
    -    // Tokens can only be obtained from TestTokenProvider, for hdfs, hbase and hive tokens cannot
    -    // be obtained.
    -    credentialManager.obtainCredentials(hadoopConf, creds)
    -    val tokens = creds.getAllTokens
    -    tokens.size() should be (1)
    -    tokens.iterator().next().getService should be (new Text("test"))
    -  }
    -
    -  test("verify getting credential renewal info") {
    -    credentialManager = new ConfigurableCredentialManager(sparkConf, hadoopConf)
    -    val creds = new Credentials()
    -
    -    val testCredentialProvider = credentialManager.getServiceCredentialProvider("test").get
    -      .asInstanceOf[TestCredentialProvider]
    -    // Only TestTokenProvider can get the time of next token renewal
    -    val nextRenewal = credentialManager.obtainCredentials(hadoopConf, creds)
    -    nextRenewal should be (testCredentialProvider.timeOfNextTokenRenewal)
    -  }
    -
    -  test("obtain tokens For HiveMetastore") {
    -    val hadoopConf = new Configuration()
    -    hadoopConf.set("hive.metastore.kerberos.principal", "bob")
    -    // thrift picks up on port 0 and bails out, without trying to talk to endpoint
    -    hadoopConf.set("hive.metastore.uris", "http://localhost:0")
    -
    -    val hiveCredentialProvider = new HiveCredentialProvider()
    -    val credentials = new Credentials()
    -    hiveCredentialProvider.obtainCredentials(hadoopConf, sparkConf, credentials)
    -
    -    credentials.getAllTokens.size() should be (0)
    -  }
    -
    -  test("Obtain tokens For HBase") {
    -    val hadoopConf = new Configuration()
    -    hadoopConf.set("hbase.security.authentication", "kerberos")
    -
    -    val hbaseTokenProvider = new HBaseCredentialProvider()
    -    val creds = new Credentials()
    -    hbaseTokenProvider.obtainCredentials(hadoopConf, sparkConf, creds)
    -
    -    creds.getAllTokens.size should be (0)
    -  }
    -}
    -
    -class TestCredentialProvider extends ServiceCredentialProvider {
    -  val tokenRenewalInterval = 86400 * 1000L
    -  var timeOfNextTokenRenewal = 0L
    -
    -  override def serviceName: String = "test"
    -
    -  override def credentialsRequired(conf: Configuration): Boolean = true
    -
    -  override def obtainCredentials(
    -      hadoopConf: Configuration,
    -      sparkConf: SparkConf,
    -      creds: Credentials): Option[Long] = {
    -    if (creds == null) {
    -      // Guard out other unit test failures.
    -      return None
    -    }
    -
    -    val emptyToken = new Token()
    -    emptyToken.setService(new Text("test"))
    -    creds.addToken(emptyToken.getService, emptyToken)
    -
    -    val currTime = System.currentTimeMillis()
    -    timeOfNextTokenRenewal = (currTime - currTime % tokenRenewalInterval) + tokenRenewalInterval
    -
    -    Some(timeOfNextTokenRenewal)
    -  }
    --- End diff --
    
    Could you leave a comment for each deleted test case explaining its new location?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org