Posted to user@phoenix.apache.org by Abel Fernández <me...@gmail.com> on 2016/05/11 17:54:49 UTC

Apache Phoenix with HBaseTestingUtility and MiniKdc

Hello,

I am trying to set up a unit test in a local environment for testing Apache
Phoenix with Kerberos.

Has anyone done something similar in the past?

I am able to start the mini cluster with Kerberos, but when I get the
connection from the JDBC URL I always hit the same error:

2016-05-11 18:26:04,234 (LeaseRenewer:C0228488@localhost:61693) [WARN -
org.apache.hadoop.hdfs.LeaseRenewer.run(LeaseRenewer.java:458)] Failed to
renew lease for
[DFSClient_hb_m_localhost,61702,1462987530289_-639707842_181] for 32
seconds.  Will retry shortly ...
org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException):
SIMPLE authentication is not enabled.  Available:[TOKEN, KERBEROS]
at org.apache.hadoop.ipc.Client.call(Client.java:1468)
at org.apache.hadoop.ipc.Client.call(Client.java:1399)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at com.sun.proxy.$Proxy19.renewLease(Unknown Source)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.renewLease(ClientNamenodeProtocolTranslatorPB.java:571)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
at
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy20.renewLease(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:279)
at com.sun.proxy.$Proxy21.renewLease(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient.renewLease(DFSClient.java:878)
at org.apache.hadoop.hdfs.LeaseRenewer.renew(LeaseRenewer.java:417)
at org.apache.hadoop.hdfs.LeaseRenewer.run(LeaseRenewer.java:442)
at org.apache.hadoop.hdfs.LeaseRenewer.access$700(LeaseRenewer.java:71)
at org.apache.hadoop.hdfs.LeaseRenewer$1.run(LeaseRenewer.java:298)
at java.lang.Thread.run(Thread.java:745)
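
From what I have read, I assume the test JVM itself also needs an explicit
Kerberos login from the MiniKdc keytab before the Phoenix connection touches
HDFS/HBase. This is only a sketch of what I understand would be needed (it
reuses KDC_PRINCIPAL and KEYTAB_FILE from the full code below, and I have not
verified that it fixes the lease renewal error):

import org.apache.hadoop.security.UserGroupInformation

// Make Hadoop security pick up the Kerberos settings, then log the current
// process in from the keytab created by MiniKdc.
UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration)
UserGroupInformation.loginUserFromKeytab(KDC_PRINCIPAL, KEYTAB_FILE.getAbsolutePath)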

One more thing: when I change the authentication mode to SIMPLE, the test
works fine, but that is not valid for me because it does not use Kerberos
authentication.

conf.set("hadoop.security.authentication", "SIMPLE")
conf.set(User.HBASE_SECURITY_CONF_KEY, "SIMPLE")

This is the entire code:

import java.io.File
import java.sql.DriverManager
import java.util.Properties

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost
// KeyStoreTestUtil is used below but was not imported; assuming the HBase test
// helper here (use org.apache.hadoop.security.ssl.KeyStoreTestUtil if that is
// what your classpath provides).
import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil
import org.apache.hadoop.hbase.ipc.{RpcClientFactory, RpcClientImpl}
import org.apache.hadoop.hbase.security._
import org.apache.hadoop.hbase.security.token.TokenProvider
import org.apache.hadoop.hbase.testclassification.MediumTests
import org.apache.hadoop.hbase.util.FSUtils
import org.apache.hadoop.hbase.{HBaseTestingUtility, HConstants, LocalHBaseCluster}
import org.apache.hadoop.hdfs.DFSConfigKeys
import org.apache.hadoop.http.HttpConfig
import org.apache.hadoop.minikdc.MiniKdc
import org.apache.hadoop.security.UserGroupInformation
import org.apache.phoenix.util.PhoenixRuntime._
import org.junit.experimental.categories.Category
import org.scalatest.{BeforeAndAfterAll, Suite}

@Category(Array(classOf[MediumTests]))
trait PhoenixKdcTest extends BeforeAndAfterAll { this: Suite =>
  private val TEST_UTIL: HBaseTestingUtility = new HBaseTestingUtility
  private var CLUSTER: LocalHBaseCluster = null
  private val KEYTAB_FILE: File = new File(TEST_UTIL.getDataTestDir("keytab").toUri.getPath)
  private var KDC: MiniKdc = null
  private val HOST: String = "localhost"
  private var USERNAME: String = null
  private var PRINCIPAL: String = null
  private var KDC_PRINCIPAL: String = null
  private var HTTP_PRINCIPAL: String = null
  private val KRB_PRINCIPAL: String = "hbase.regionserver.kerberos.principal"
  private val MASTER_KRB_PRINCIPAL: String = "hbase.master.kerberos.principal"
  private val KRB_KEYTAB_FILE: String = "hbase.regionserver.keytab.file"

  lazy val hbaseConfiguration = {
    val conf = TEST_UTIL.getConfiguration
    val quorum = conf.get("hbase.zookeeper.quorum")
    val clientPort = conf.get("hbase.zookeeper.property.clientPort")
    val znodeParent = conf.get("zookeeper.znode.parent")
    conf.set(HConstants.ZOOKEEPER_QUORUM, s"$quorum:$clientPort:$znodeParent")
    conf
  }
  @throws(classOf[Exception])
  private def setHdfsSecuredConfiguration(conf: Configuration) {

    conf.set("hadoop.security.authentication", "KERBEROS")
    conf.set(User.HBASE_SECURITY_CONF_KEY, "KERBEROS")
    conf.setBoolean("hbase.security.authorization", true)
    conf.set(KRB_KEYTAB_FILE, System.getProperty(KRB_KEYTAB_FILE))
    conf.set(KRB_PRINCIPAL, System.getProperty(KRB_PRINCIPAL))
    conf.set(MASTER_KRB_PRINCIPAL, System.getProperty(KRB_PRINCIPAL))

    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, KDC_PRINCIPAL)
    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath)
    conf.set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, KDC_PRINCIPAL)
    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath)
    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, HTTP_PRINCIPAL + "@" + KDC.getRealm)
    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true)
    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name)
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0")
    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0")
    val keystoresDir: File = new File(TEST_UTIL.getDataTestDir("keystore").toUri.getPath)
    keystoresDir.mkdirs
    val sslConfDir: String = KeyStoreTestUtil.getClasspathDir(classOf[PhoenixKdcTest])
    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath, sslConfDir, conf, false)
    conf.setBoolean("ignore.secure.ports.for.testing", true)

  }


  private def startMiniCluster(){
    val prop: Properties = MiniKdc.createConf
    prop.put(MiniKdc.DEBUG, "true")
    KDC = new MiniKdc(prop, new File(TEST_UTIL.getDataTestDir("kdc").toUri.getPath))
    KDC.start
    USERNAME = UserGroupInformation.getLoginUser.getShortUserName
    PRINCIPAL = USERNAME + "/" + HOST
    HTTP_PRINCIPAL = "HTTP/" + HOST
    KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL)
    TEST_UTIL.startMiniZKCluster
    KDC_PRINCIPAL = PRINCIPAL + "@" + KDC.getRealm

    setSystemProperty("hbase.regionserver.keytab.file",
KEYTAB_FILE.getAbsolutePath);
    setSystemProperty("hbase.regionserver.kerberos.principal", KDC_PRINCIPAL);
    setHdfsSecuredConfiguration(TEST_UTIL.getConfiguration)
    UserGroupInformation.setConfiguration(TEST_UTIL.getConfiguration)
    TEST_UTIL.getConfiguration.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, classOf[TokenProvider].getName)
    TEST_UTIL.startMiniDFSCluster(1)
    val rootdir: Path = TEST_UTIL.getDataTestDirOnTestFS("TestGenerateDelegationToken")
    FSUtils.setRootDir(TEST_UTIL.getConfiguration, rootdir)
    CLUSTER = new LocalHBaseCluster(TEST_UTIL.getConfiguration)

    User.login(TEST_UTIL.getConfiguration, KRB_KEYTAB_FILE, KRB_PRINCIPAL, HOST)

    CLUSTER.startup
  }

  override def beforeAll() {
    startMiniCluster()
    TEST_UTIL.getConfiguration.set(RpcClientFactory.CUSTOM_RPC_CLIENT_IMPL_CONF_KEY, classOf[RpcClientImpl].getName)

    val url = JDBC_PROTOCOL + JDBC_PROTOCOL_SEPARATOR +
      hbaseConfiguration.get(HConstants.ZOOKEEPER_QUORUM) +
      JDBC_PROTOCOL_SEPARATOR + PRINCIPAL + JDBC_PROTOCOL_SEPARATOR +
      KEYTAB_FILE

    // Set up a test table to check connectivity
    val conn = DriverManager.getConnection(url)
    conn.setAutoCommit(true)

    val stmt = conn.createStatement()
    stmt.execute("CREATE TABLE IF NOT EXISTS us_population(state
CHAR(2) NOT NULL, city VARCHAR, population BIGINT  CONSTRAINT my_pk
PRIMARY KEY (state))")
    stmt.execute("UPSERT INTO us_population values ('CA', 'San
Francisco', 837442)");
    stmt.execute("UPSERT INTO us_population values ('NY', 'New York',
11231312)");
    stmt.execute("UPSERT INTO us_population values ('TX', 'Texas', 6456465)");

    val resultSet = stmt.executeQuery("SELECT * FROM us_population")
    val resultStream = new Iterator[String] {
      def hasNext = resultSet.next()
      def next() = resultSet.getString(1)
    }.toStream
    assert(resultStream.count(_=>true)==3)

    conn.close()
  }

  override def afterAll() {
    if (CLUSTER != null) {
      CLUSTER.shutdown
      CLUSTER.join
    }
    if (KDC != null) {
      KDC.stop
    }
    TEST_UTIL.shutdownMiniCluster
  }

  private[security] def setSystemProperty(propertyName: String, propertyValue: String) {
    System.setProperty(propertyName, propertyValue)
  }

}
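
For reference, I expect the URL built in beforeAll to end up looking roughly
like this (the port, znode, and keytab path below are just placeholder values
following the jdbc:phoenix:<quorum>:<port>:<znode>:<principal>:<keytab> form):

jdbc:phoenix:localhost:2181:/hbase:myuser/localhost:/tmp/test-data/keytab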

-- 
Un saludo - Best Regards.
Abel