Posted to commits@directory.apache.org by pl...@apache.org on 2017/11/28 03:04:10 UTC

[09/15] directory-kerby git commit: Change the Maven groupId in HAS folder to org.apache.kerby.

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/pom.xml
----------------------------------------------------------------------
diff --git a/has/has-server/pom.xml b/has/has-server/pom.xml
index 30a4aa8..4771741 100644
--- a/has/has-server/pom.xml
+++ b/has/has-server/pom.xml
@@ -5,7 +5,7 @@
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
-    <groupId>org.apache.hadoop</groupId>
+    <groupId>org.apache.kerby</groupId>
     <artifactId>has-project</artifactId>
     <version>1.0.0-SNAPSHOT</version>
   </parent>
@@ -90,7 +90,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>3.0.0-alpha2</version>
+      <version>${hadoop.version}</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
@@ -109,7 +109,7 @@
       <version>RELEASE</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.kerby</groupId>
       <artifactId>has-common</artifactId>
       <version>${project.version}</version>
     </dependency>

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
deleted file mode 100644
index b4cd5d6..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/AbstractHasServerPlugin.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.kerby.kerberos.kerb.KrbRuntime;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public abstract class AbstractHasServerPlugin implements HasServerPlugin {
-
-    public static final Logger LOG = LoggerFactory.getLogger(AbstractHasServerPlugin.class);
-
-    protected abstract void doAuthenticate(AuthToken userToken, AuthToken authToken)
-        throws HasAuthenException;
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    public AuthToken authenticate(AuthToken userToken) throws HasAuthenException {
-
-        AuthToken authToken = KrbRuntime.getTokenProvider("JWT").createTokenFactory().createToken();
-
-        doAuthenticate(userToken, authToken);
-
-        return authToken;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
deleted file mode 100644
index 14df580..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasAuthenException.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.hadoop.has.common.HasException;
-
-public class HasAuthenException extends HasException {
-    private static final long serialVersionUID = 171016915395892939L;
-
-    public HasAuthenException(Throwable cause) {
-        super(cause);
-    }
-
-    public HasAuthenException(String message) {
-        super(message);
-    }
-
-    public HasAuthenException(String message, Throwable cause) {
-        super(message, cause);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
deleted file mode 100644
index cb22b8e..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServer.java
+++ /dev/null
@@ -1,701 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.commons.dbutils.DbUtils;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.web.WebConfigKey;
-import org.apache.hadoop.has.server.web.WebServer;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.kerby.kerberos.kdc.impl.NettyKdcServerImpl;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
-import org.apache.kerby.kerberos.kerb.client.ClientUtil;
-import org.apache.kerby.kerberos.kerb.client.KrbConfig;
-import org.apache.kerby.kerberos.kerb.client.KrbSetting;
-import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
-import org.apache.kerby.kerberos.kerb.identity.backend.IdentityBackend;
-import org.apache.kerby.kerberos.kerb.server.KdcServer;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-import org.apache.kerby.util.IOUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Pattern;
-
-/**
- * The HAS KDC server implementation.
- */
-public class HasServer {
-    public static final Logger LOG = LoggerFactory.getLogger(HasServer.class);
-
-    private static HasServer server = null;
-
-    private KrbSetting krbSetting;
-    private KdcServer kdcServer;
-    private WebServer webServer;
-    private File confDir;
-    private File workDir;
-    private String kdcHost;
-    private HasConfig hasConfig;
-
-    public HasServer(File confDir) throws KrbException {
-        this.confDir = confDir;
-    }
-
-    private void setConfDir(File confDir) {
-        this.confDir = confDir;
-    }
-
-    public File getConfDir() {
-        return confDir;
-    }
-
-    public File getWorkDir() {
-        return workDir;
-    }
-
-    public void setWorkDir(File workDir) {
-        this.workDir = workDir;
-    }
-
-    public void setKdcHost(String host) {
-        this.kdcHost = host;
-    }
-
-    public String getKdcHost() {
-        return kdcHost;
-    }
-
-    public KrbSetting getKrbSetting() {
-        return krbSetting;
-    }
-
-    public KdcServer getKdcServer() {
-        return kdcServer;
-    }
-
-    public WebServer getWebServer() {
-        return webServer;
-    }
-
-    public void setWebServer(WebServer webServer) {
-        this.webServer = webServer;
-    }
-
-    public void startKdcServer() throws HasException {
-        BackendConfig backendConfig;
-        try {
-            backendConfig = KdcUtil.getBackendConfig(getConfDir());
-        } catch (KrbException e) {
-            throw new HasException("Failed to get backend config. " + e);
-        }
-        String backendJar = backendConfig.getString("kdc_identity_backend");
-        if (backendJar.equals("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend")) {
-            updateKdcConf();
-        }
-        try {
-            kdcServer = new KdcServer(confDir);
-        } catch (KrbException e) {
-            throw new HasException("Failed to create KdcServer. " + e);
-        }
-        kdcServer.setWorkDir(workDir);
-        kdcServer.setInnerKdcImpl(new NettyKdcServerImpl(kdcServer.getKdcSetting()));
-        try {
-            kdcServer.init();
-        } catch (KrbException e) {
-            LOG.error("Errors occurred when init has kdc server:  " + e.getMessage());
-            throw new HasException("Errors occurred when init has kdc server:  " + e.getMessage());
-        }
-
-        KrbConfig krbConfig = null;
-        try {
-            krbConfig = ClientUtil.getConfig(confDir);
-        } catch (KrbException e) {
-            new HasException("Errors occurred when getting the config from conf dir. "
-                + e.getMessage());
-        }
-        if (krbConfig == null) {
-            krbConfig = new KrbConfig();
-        }
-        this.krbSetting = new KrbSetting(krbConfig);
-        try {
-            kdcServer.start();
-        } catch (KrbException e) {
-            throw new HasException("Failed to start kdc server. " + e);
-        }
-        try {
-            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "false");
-        } catch (Exception e) {
-            throw new HasException("Failed to enable conf. " + e);
-        }
-        setHttpFilter();
-    }
-
-    private void setHttpFilter() throws HasException {
-        File httpKeytabFile = new File(workDir, "http.keytab");
-        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
-            kdcServer.getIdentityService());
-        createHttpPrincipal(kadmin);
-        try {
-            kadmin.exportKeytab(httpKeytabFile, getHttpPrincipal());
-        } catch (KrbException e) {
-            throw new HasException("Failed to export keytab: " + e.getMessage());
-        }
-        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE,
-            hasConfig.getFilterAuthType());
-        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-            getHttpPrincipal());
-        webServer.getConf().setString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
-            httpKeytabFile.getPath());
-        webServer.defineFilter();
-    }
-
-    public File initKdcServer() throws KrbException {
-        File adminKeytabFile = new File(workDir, "admin.keytab");
-        LocalKadmin kadmin = new LocalKadminImpl(kdcServer.getKdcSetting(),
-            kdcServer.getIdentityService());
-        if (adminKeytabFile.exists()) {
-            throw new KrbException("KDC Server is already inited.");
-        }
-        kadmin.createBuiltinPrincipals();
-        kadmin.exportKeytab(adminKeytabFile, kadmin.getKadminPrincipal());
-        System.out.println("The keytab for kadmin principal "
-            + " has been exported to the specified file "
-            + adminKeytabFile.getAbsolutePath() + ", please safely keep it, "
-            + "in order to use kadmin tool later");
-
-        return adminKeytabFile;
-    }
-
-    public void createHttpPrincipal(LocalKadmin kadmin) throws HasException {
-        String httpPrincipal = getHttpPrincipal();
-        IdentityBackend backend = kdcServer.getIdentityService();
-        try {
-            if (backend.getIdentity(httpPrincipal) == null) {
-                kadmin.addPrincipal(httpPrincipal);
-            } else {
-                LOG.info("The http principal already exists in backend.");
-            }
-        } catch (KrbException e) {
-            throw new HasException("Failed to add princial, " + e.getMessage());
-        }
-    }
-
-    public String getHttpPrincipal() throws HasException {
-        String realm = kdcServer.getKdcSetting().getKdcRealm();
-        String nameString;
-        try {
-            InetAddress addr = InetAddress.getLocalHost();
-            String fqName = addr.getCanonicalHostName();
-            nameString = "HTTP/" + fqName + "@" + realm;
-        } catch (UnknownHostException e) {
-            throw new HasException(e);
-        }
-        LOG.info("The http principal name is: " + nameString);
-        return nameString;
-    }
-
-    /**
-     * Update conf file.
-     *
-     * @param confName  conf file name
-     * @param values    customized values
-     * @throws IOException throw IOException
-     * @throws KrbException e
-     */
-    public void updateConfFile(String confName, Map<String, String> values)
-        throws IOException, HasException {
-        File confFile = new File(getConfDir().getAbsolutePath(), confName);
-        if (confFile.exists()) {
-            // Update conf file content
-            InputStream templateResource;
-            if (confName.equals("has-server.conf")) {
-                templateResource = new FileInputStream(confFile);
-            } else {
-                String resourcePath = "/" + confName + ".template";
-                templateResource = getClass().getResourceAsStream(resourcePath);
-            }
-            String content = IOUtil.readInput(templateResource);
-            for (Map.Entry<String, String> entry : values.entrySet()) {
-                content = content.replaceAll(Pattern.quote(entry.getKey()), entry.getValue());
-            }
-
-            // Delete the original conf file
-            boolean delete = confFile.delete();
-            if (!delete) {
-                throw new HasException("Failed to delete conf file: " + confName);
-            }
-
-            // Save the updated conf file
-            IOUtil.writeFile(content, confFile);
-        } else {
-            throw new HasException("Conf file: " + confName + " not found.");
-        }
-    }
-
-    /**
-     * Get KDC Config from MySQL.
-     *
-     * @return Kdc config
-     * @throws KrbException e
-     */
-    private Map<String, String> getKdcConf() throws HasException {
-        PreparedStatement preStm = null;
-        ResultSet result = null;
-        Map<String, String> kdcConf = new HashMap<>();
-        BackendConfig backendConfig;
-        try {
-            backendConfig = KdcUtil.getBackendConfig(getConfDir());
-        } catch (KrbException e) {
-            throw new HasException("Getting backend config failed." + e.getMessage());
-        }
-        String driver = backendConfig.getString("mysql_driver");
-        String url = backendConfig.getString("mysql_url");
-        String user = backendConfig.getString("mysql_user");
-        String password = backendConfig.getString("mysql_password");
-        Connection connection = startConnection(driver, url, user, password);
-        try {
-
-            // Get Kdc configuration from kdc_config table
-            String stmKdc = "SELECT * FROM `kdc_config` WHERE id = 1";
-            preStm = connection.prepareStatement(stmKdc);
-            result = preStm.executeQuery();
-            while (result.next()) {
-                String realm = result.getString("realm");
-                String servers = result.getString("servers");
-                String port = String.valueOf(result.getInt("port"));
-                kdcConf.put("servers", servers);
-                kdcConf.put("_PORT_", port);
-                kdcConf.put("_REALM_", realm);
-            }
-
-        } catch (SQLException e) {
-            LOG.error("Error occurred while getting kdc config.");
-            throw new HasException("Failed to get kdc config. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-            DbUtils.closeQuietly(result);
-            DbUtils.closeQuietly(connection);
-        }
-
-        return kdcConf;
-    }
-
-    /**
-     * Update KDC conf file.
-     *
-     * @throws KrbException e
-     */
-    private void updateKdcConf() throws HasException {
-        try {
-            Map<String, String> values = getKdcConf();
-            String host = getKdcHost();
-            if (host == null) {
-                host = getWebServer().getBindAddress().getHostName();
-            }
-            values.remove("servers");
-            values.put("_HOST_", host);
-            updateConfFile("kdc.conf", values);
-        } catch (IOException e) {
-            throw new HasException("Failed to update kdc config. ", e);
-        }
-    }
-
-    /**
-     * Start the MySQL connection.
-     *
-     * @param url url of connection
-     * @param user username of connection
-     * @param password password of connection
-     * @throws KrbException e
-     * @return MySQL JDBC connection
-     */
-    private Connection startConnection(String driver, String url, String user,
-                                       String password) throws HasException {
-        Connection connection;
-        try {
-            Class.forName(driver);
-            connection = DriverManager.getConnection(url, user, password);
-            if (!connection.isClosed()) {
-                LOG.info("Succeeded in connecting to MySQL.");
-            }
-        } catch (ClassNotFoundException e) {
-            throw new HasException("JDBC Driver Class not found. ", e);
-        } catch (SQLException e) {
-            throw new HasException("Failed to connecting to MySQL. ", e);
-        }
-
-        return connection;
-    }
-
-    /**
-     * Config HAS server KDC which have MySQL backend.
-     * @param backendConfig MySQL backend config
-     * @param realm KDC realm to set
-     * @param host KDC host to set
-     * @param hasServer has server to get param
-     * @throws HasException e
-     */
-    public void configMySQLKdc(BackendConfig backendConfig, String realm, int port,
-                               String host, HasServer hasServer) throws HasException {
-
-        // Start mysql connection
-        String driver = backendConfig.getString("mysql_driver");
-        String url = backendConfig.getString("mysql_url");
-        String user = backendConfig.getString("mysql_user");
-        String password = backendConfig.getString("mysql_password");
-        Connection connection = startConnection(driver, url, user, password);
-
-        ResultSet resConfig = null;
-        PreparedStatement preStm = null;
-        try {
-            createKdcTable(connection); // Create kdc_config table if not exists
-            String stm = "SELECT * FROM `kdc_config` WHERE id = 1";
-            preStm = connection.prepareStatement(stm);
-            resConfig = preStm.executeQuery();
-            if (!resConfig.next()) {
-                addKdcConfig(connection, realm, port, host);
-            } else {
-                String oldHost = hasServer.getKdcHost();
-                String servers = resConfig.getString("servers");
-                String[] serverArray = servers.split(",");
-                List<String> serverList = new ArrayList<>();
-                Collections.addAll(serverList, serverArray);
-                if (serverList.contains(oldHost)) {
-                    servers = servers.replaceAll(oldHost, host);
-                } else {
-                    servers = servers + "," + host;
-                }
-                boolean initialized = resConfig.getBoolean("initialized");
-                updateKdcConfig(connection, initialized, port, realm, servers);
-            }
-            hasServer.setKdcHost(host);
-        } catch (SQLException e) {
-            throw new HasException("Failed to config HAS KDC. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-            DbUtils.closeQuietly(resConfig);
-            DbUtils.closeQuietly(connection);
-        }
-    }
-
-    /**
-     * Create kdc_config table in database.
-     * @param conn database connection
-     * @throws KrbException e
-     */
-    private void createKdcTable(final Connection conn) throws HasException {
-        PreparedStatement preStm = null;
-        try {
-            String stm = "CREATE TABLE IF NOT EXISTS `kdc_config` ("
-                + "port INTEGER DEFAULT 88, servers VARCHAR(255) NOT NULL, "
-                + "initialized bool DEFAULT FALSE, realm VARCHAR(255) "
-                + "DEFAULT NULL, id INTEGER DEFAULT 1, CHECK (id=1), PRIMARY KEY (id)) "
-                + "ENGINE=INNODB;";
-            preStm = conn.prepareStatement(stm);
-            preStm.executeUpdate();
-        } catch (SQLException e) {
-            throw new HasException("Failed to create kdc_config table. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-        }
-    }
-
-    /**
-     * Add KDC Config information in database.
-     * @param conn database connection
-     * @param realm realm to add
-     * @param port port to add
-     * @param host host to add
-     */
-    private void addKdcConfig(Connection conn, String realm, int port, String host)
-        throws HasException {
-        PreparedStatement preStm = null;
-        try {
-            String stm = "INSERT INTO `kdc_config` (port, servers, realm)" + " VALUES(?, ?, ?)";
-            preStm = conn.prepareStatement(stm);
-            preStm.setInt(1, port);
-            preStm.setString(2, host);
-            preStm.setString(3, realm);
-            preStm.executeUpdate();
-        } catch (SQLException e) {
-            throw new HasException("Failed to insert into kdc_config table. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-        }
-    }
-
-    /**
-     * Update KDC Config record in database.
-     * @param conn database connection
-     * @param realm realm to update
-     * @param port port to update
-     * @param servers servers to update
-     * @param initialized initial state of KDC Config
-     */
-    private void updateKdcConfig(Connection conn, boolean initialized, int port,
-                                 String realm, String servers) throws HasException {
-        PreparedStatement preStm = null;
-        try {
-            if (initialized) {
-                String stmUpdate = "UPDATE `kdc_config` SET servers = ? WHERE id = 1";
-                preStm = conn.prepareStatement(stmUpdate);
-                preStm.setString(1, servers);
-                preStm.executeUpdate();
-            } else {
-                String stmUpdate = "UPDATE `kdc_config` SET port = ?, realm = ?, servers = ? WHERE id = 1";
-                preStm = conn.prepareStatement(stmUpdate);
-                preStm.setInt(1, port);
-                preStm.setString(2, realm);
-                preStm.setString(3, servers);
-                preStm.executeUpdate();
-            }
-        } catch (SQLException e) {
-            throw new HasException("Failed to update KDC Config. ", e);
-        } finally {
-            DbUtils.closeQuietly(preStm);
-        }
-    }
-
-    /**
-     * Read in krb5-template.conf and substitute in the correct port.
-     *
-     * @return krb5 conf file
-     * @throws IOException e
-     * @throws KrbException e
-     */
-    public File generateKrb5Conf() throws HasException {
-        Map<String, String> kdcConf = getKdcConf();
-        String[] servers = kdcConf.get("servers").split(",");
-        int kdcPort = Integer.parseInt(kdcConf.get("_PORT_"));
-        String kdcRealm = kdcConf.get("_REALM_");
-        StringBuilder kdcBuilder = new StringBuilder();
-        for (String server : servers) {
-            String append = "\t\tkdc = " + server.trim() + ":" + kdcPort + "\n";
-            kdcBuilder.append(append);
-        }
-        String kdc = kdcBuilder.toString();
-        kdc = kdc.substring(0, kdc.length() - 1);
-        String resourcePath = "/krb5.conf.template";
-        InputStream templateResource = getClass().getResourceAsStream(resourcePath);
-        String content = null;
-        try {
-            content = IOUtil.readInput(templateResource);
-        } catch (IOException e) {
-            throw new HasException("Read template resource failed. " + e);
-        }
-        content = content.replaceAll("_REALM_", kdcRealm);
-        content = content.replaceAll("_PORT_", String.valueOf(kdcPort));
-        content = content.replaceAll("_UDP_LIMIT_", "4096");
-        content = content.replaceAll("_KDCS_", kdc);
-        File confFile = new File(confDir, "krb5.conf");
-        if (confFile.exists()) {
-            boolean delete = confFile.delete();
-            if (!delete) {
-                throw new HasException("File delete error!");
-            }
-        }
-        try {
-            IOUtil.writeFile(content, confFile);
-        } catch (IOException e) {
-            throw new HasException("Write content to conf file failed. " + e);
-        }
-
-        return confFile;
-    }
-
-    /**
-     * Read in has-server.conf and create has-client.conf.
-     *
-     * @return has conf file
-     * @throws IOException e
-     * @throws HasException e
-     */
-    public File generateHasConf() throws HasException, IOException {
-        Map<String, String> kdcConf = getKdcConf();
-        String servers = kdcConf.get("servers");
-        File confFile = new File(getConfDir().getAbsolutePath(), "has-server.conf");
-        HasConfig hasConfig = HasUtil.getHasConfig(confFile);
-        if (hasConfig != null) {
-            String defaultValue = hasConfig.getHttpsHost();
-            InputStream templateResource = new FileInputStream(confFile);
-            String content = IOUtil.readInput(templateResource);
-            content = content.replaceFirst(Pattern.quote(defaultValue), servers);
-            File hasFile = new File(confDir, "has-client.conf");
-            IOUtil.writeFile(content, hasFile);
-            return hasFile;
-        } else {
-            throw new HasException("has-server.conf not found. ");
-        }
-    }
-
-    public void stopKdcServer() {
-        try {
-            kdcServer.stop();
-        } catch (KrbException e) {
-            LOG.error("Fail to stop has kdc server");
-        }
-    }
-
-    public void startWebServer() throws HasException {
-        if (webServer == null) {
-            HasConfig conf = new HasConfig();
-
-            // Parse has-server.conf to get http_host and http_port
-            File confFile = new File(confDir, "has-server.conf");
-            hasConfig = HasUtil.getHasConfig(confFile);
-            if (hasConfig != null) {
-                try {
-                    String httpHost;
-                    String httpPort;
-                    String httpsHost;
-                    String httpsPort;
-                    if (hasConfig.getHttpHost() != null) {
-                        httpHost = hasConfig.getHttpHost();
-                    } else {
-                        LOG.info("Cannot get the http_host from has-server.conf, using the default http host.");
-                        httpHost = WebConfigKey.HAS_HTTP_HOST_DEFAULT;
-                    }
-                    if (hasConfig.getHttpPort() != null) {
-                        httpPort = hasConfig.getHttpPort();
-                    } else {
-                        LOG.info("Cannot get the http_port from has-server.conf, using the default http port.");
-                        httpPort = String.valueOf(WebConfigKey.HAS_HTTP_PORT_DEFAULT);
-                    }
-                    if (hasConfig.getHttpsHost() != null) {
-                        httpsHost = hasConfig.getHttpsHost();
-                    } else {
-                        LOG.info("Cannot get the https_host from has-server.conf, using the default https host.");
-                        httpsHost = WebConfigKey.HAS_HTTPS_HOST_DEFAULT;
-                    }
-                    if (hasConfig.getHttpsPort() != null) {
-                        httpsPort = hasConfig.getHttpsPort();
-                    } else {
-                        LOG.info("Cannot get the https_port from has-server.conf , using the default https port.");
-                        httpsPort = String.valueOf(WebConfigKey.HAS_HTTPS_PORT_DEFAULT);
-                    }
-                    String hasHttpAddress = httpHost + ":" + httpPort;
-                    String hasHttpsAddress = httpsHost + ":" + httpsPort;
-                    LOG.info("The web server http address: " + hasHttpAddress);
-                    LOG.info("The web server https address: " + hasHttpsAddress);
-
-                    conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY, hasHttpAddress);
-                    conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY, hasHttpsAddress);
-                    conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY,
-                        HttpConfig.Policy.HTTP_AND_HTTPS.name());
-                    conf.setString(WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-                        hasConfig.getSslServerConf());
-                    webServer = new WebServer(conf);
-                } catch (NumberFormatException e) {
-                    throw new IllegalArgumentException("https_port should be a number. "
-                        + e.getMessage());
-                }
-            } else {
-                throw new HasException("has-server.conf not found in " + confDir + ". ");
-            }
-        } else {
-            hasConfig = webServer.getConf();
-        }
-        webServer.start();
-        webServer.defineConfFilter();
-        try {
-            HasUtil.setEnableConf(new File(confDir, "has-server.conf"), "true");
-        } catch (IOException e) {
-            throw new HasException("Errors occurred when enable conf. " + e.getMessage());
-        }
-        webServer.setWebServerAttribute(this);
-    }
-
-    public void stopWebServer() {
-        if (webServer != null) {
-            try {
-                webServer.stop();
-            } catch (Exception e) {
-                LOG.error("Failed to stop http server. " + e.getMessage());
-            }
-        }
-    }
-
-    public static void main(String[] args) {
-        if (args[0].equals("-start")) {
-            String confDirPath = args[1];
-            String workDirPath = args[2];
-            File confDir = new File(confDirPath);
-            File workDir = new File(workDirPath);
-            if (!confDir.exists() || !workDir.exists()) {
-                LOG.error("Invalid or not exist conf-dir or work-dir");
-                System.exit(3);
-            }
-            try {
-                server = new HasServer(confDir);
-            } catch (KrbException e) {
-                LOG.error("Errors occurred when create kdc server:  " + e.getMessage());
-                System.exit(4);
-            }
-            server.setConfDir(confDir);
-            server.setWorkDir(workDir);
-            //Only start the webserver, the kdcserver can start after setting the realm
-            try {
-                server.startWebServer();
-            } catch (HasException e) {
-                LOG.error("Errors occurred when start has http server:  " + e.getMessage());
-                System.exit(6);
-            }
-
-            if (server.getWebServer().getHttpAddress() != null) {
-                LOG.info("HAS http server started.");
-                LOG.info("host: " + server.getWebServer().getHttpAddress().getHostName());
-                LOG.info("port: " + server.getWebServer().getHttpAddress().getPort());
-            }
-            if (server.getWebServer().getHttpsAddress() != null) {
-                LOG.info("HAS https server started.");
-                LOG.info("host: " + server.getWebServer().getHttpsAddress().getHostName());
-                LOG.info("port: " + server.getWebServer().getHttpsAddress().getPort());
-            }
-        } else if (args[0].equals("-stop")) {
-            if (server != null) {
-                server.stopWebServer();
-                server.stopKdcServer();
-            }
-        } else {
-            System.exit(2);
-        }
-    }
-}
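
[Note, not part of the commit above: the deleted main method expects "-start <conf-dir> <work-dir>" or "-stop". The launch script is not shown in this diff, but a rough, illustrative way to drive it programmatically would look like the sketch below; the directory paths are placeholders and only the web server is started, matching the comment in the code.]

import org.apache.hadoop.has.server.HasServer;

public class StartHasServerExample {
    public static void main(String[] args) {
        // Placeholder directories; both must already exist or HasServer.main exits.
        String confDir = "/etc/has/conf";
        String workDir = "/var/has/work";
        // Starts only the web server; the KDC is started later, once the realm is set.
        HasServer.main(new String[]{"-start", confDir, workDir});
    }
}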

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
deleted file mode 100644
index 6650308..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPlugin.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-
-public interface HasServerPlugin {
-        /**
-         * Get the login module type ID, used to distinguish this module from others.
-         * Should correspond to the client side module.
-         *
-         * @return login type
-         */
-        String getLoginType();
-
-        /**
-         * Perform all the server side authentication logics, the results wrapped in an AuthToken,
-         * will be used to exchange a Kerberos ticket.
-         *
-         * @param userToken user token
-         * @return auth token
-         */
-        AuthToken authenticate(AuthToken userToken) throws HasAuthenException;
-}
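
[Note, not part of the commit above: for context, a minimal sketch of a server-side login plugin built on the HasServerPlugin interface and the AbstractHasServerPlugin base class shown earlier in this diff. The class name SampleHasServerPlugin, the "SAMPLE" login type, and the subject-copying logic are assumptions for illustration only.]

package org.apache.hadoop.has.server;

import org.apache.kerby.kerberos.kerb.type.base.AuthToken;

// Illustrative only: a trivial plugin that validates the user token and copies
// its subject into the token that will be exchanged for a Kerberos ticket.
public class SampleHasServerPlugin extends AbstractHasServerPlugin {

    @Override
    public String getLoginType() {
        // Must match the login type reported by the corresponding client-side module.
        return "SAMPLE";
    }

    @Override
    protected void doAuthenticate(AuthToken userToken, AuthToken authToken)
        throws HasAuthenException {
        if (userToken.getSubject() == null) {
            throw new HasAuthenException("User token carries no subject.");
        }
        // Assumed setter on Kerby's AuthToken; real plugins would also validate
        // the user token's signature and attributes here.
        authToken.setSubject(userToken.getSubject());
    }
}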

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
deleted file mode 100644
index 621b321..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/HasServerPluginRegistry.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.has.server;
-
-import org.apache.hadoop.has.common.HasException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collections;
-import java.util.Map;
-import java.util.ServiceLoader;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-
-public class HasServerPluginRegistry {
-    static final Logger LOG = LoggerFactory.getLogger(HasServerPluginRegistry.class);
-
-    private static Map<String, Class> allPlugins = new ConcurrentHashMap<>();
-
-    static {
-        ServiceLoader<HasServerPlugin> plugins = ServiceLoader.load(HasServerPlugin.class);
-
-        for (HasServerPlugin plugin : plugins) {
-            allPlugins.put(plugin.getLoginType(), plugin.getClass());
-        }
-    }
-
-    public static Set<String> registeredPlugins() {
-        return Collections.unmodifiableSet(allPlugins.keySet());
-    }
-
-    public static boolean registeredPlugin(String name) {
-        return allPlugins.containsKey(name);
-    }
-
-    public static HasServerPlugin createPlugin(String name) throws HasException {
-        if (!registeredPlugin(name)) {
-            throw new HasException("Unregistered plugin " + name);
-        }
-        try {
-            HasServerPlugin serverPlugin = (HasServerPlugin) allPlugins.get(name).newInstance();
-            return serverPlugin;
-        } catch (Exception e) {
-            LOG.error("Create {} plugin failed", name, e);
-            throw new HasException(e.getMessage());
-        }
-    }
-}
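
[Note, not part of the commit above: the registry shown here is driven by java.util.ServiceLoader, so a plugin jar registers its implementation through a provider-configuration file named META-INF/services/org.apache.hadoop.has.server.HasServerPlugin whose content is the implementation class name. Lookup by login type then uses the registry; the "SAMPLE" login type below is an assumed example.]

import org.apache.hadoop.has.common.HasException;
import org.apache.hadoop.has.server.HasServerPlugin;
import org.apache.hadoop.has.server.HasServerPluginRegistry;

public class PluginLookupExample {
    public static void main(String[] args) throws HasException {
        // Lists every plugin discovered by ServiceLoader at class-load time.
        System.out.println("Registered plugins: " + HasServerPluginRegistry.registeredPlugins());
        if (HasServerPluginRegistry.registeredPlugin("SAMPLE")) {
            HasServerPlugin plugin = HasServerPluginRegistry.createPlugin("SAMPLE");
            System.out.println("Loaded plugin for login type: " + plugin.getLoginType());
        }
    }
}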

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
deleted file mode 100644
index b49c255..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
+++ /dev/null
@@ -1,382 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.admin;
-
-import org.apache.hadoop.has.common.HasAdmin;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.hadoop.has.server.web.HostRoleType;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
-import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
-import org.apache.kerby.kerberos.kerb.common.KrbUtil;
-import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
-import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
-import org.apache.kerby.kerberos.kerb.server.KdcConfig;
-import org.apache.kerby.kerberos.kerb.server.KdcSetting;
-import org.apache.kerby.kerberos.kerb.server.KdcUtil;
-import org.apache.kerby.kerberos.kerb.server.ServerSetting;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-public class LocalHasAdmin implements HasAdmin {
-    public static final Logger LOG = LoggerFactory.getLogger(LocalHasAdmin.class);
-
-    private final ServerSetting serverSetting;
-    private File confDir;
-
-    public LocalHasAdmin(HasServer hasServer) throws KrbException {
-        if (hasServer.getKdcServer() == null) {
-            throw new RuntimeException("Could not get HAS KDC server, please start KDC first.");
-        }
-        this.serverSetting = hasServer.getKdcServer().getKdcSetting();
-    }
-
-    /**
-     * Construct with prepared conf dir.
-     *
-     * @param confDir The path of conf dir
-     * @throws KrbException e
-     */
-    public LocalHasAdmin(File confDir) throws KrbException {
-        this.confDir = confDir;
-        KdcConfig tmpKdcConfig = KdcUtil.getKdcConfig(confDir);
-        if (tmpKdcConfig == null) {
-            tmpKdcConfig = new KdcConfig();
-        }
-
-        BackendConfig tmpBackendConfig = KdcUtil.getBackendConfig(confDir);
-        if (tmpBackendConfig == null) {
-            tmpBackendConfig = new BackendConfig();
-        }
-
-        this.serverSetting = new KdcSetting(tmpKdcConfig, tmpBackendConfig);
-    }
-
-    @Override
-    public List<String> getPrincipals(String exp) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        List<String> princs = null;
-        LOG.info("The value of exp is : " + exp);
-        if (exp == null || exp.equals("")) {
-            try {
-                princs = kadmin.getPrincipals();
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        } else {
-            try {
-                princs = kadmin.getPrincipals(exp);
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        }
-        return princs;
-    }
-
-    @Override
-    public void addPrincipal(String principal, String password) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        if (principal == null) {
-            throw new HasException("Value of principal is null.");
-        }
-        if (password == null || password.equals("")) {
-            try {
-                kadmin.addPrincipal(principal);
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        } else {
-            try {
-                kadmin.addPrincipal(principal, password);
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        }
-        LOG.info("Success to add principal :" + principal);
-    }
-
-    @Override
-    public void deletePrincipal(String principal) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-             throw new HasException(e);
-        }
-        if (principal == null) {
-            throw new IllegalArgumentException("Value of principal is null.");
-        }
-        try {
-            kadmin.deletePrincipal(principal);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        LOG.info("Success to delete principal :" + principal);
-    }
-
-    @Override
-    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            kadmin.renamePrincipal(oldPrincipal, newPrincipal);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        LOG.info("Success to rename principal : \"" + oldPrincipal
-                + "\" to \"" + newPrincipal + "\".");
-    }
-
-    @Override
-    public String addPrincByRole(String host, String role) throws HasException {
-        String result = "";
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        String releam = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
-        String[] princs = HostRoleType.valueOf(role).getPrincs();
-        if (princs == null) {
-            LOG.error("Cannot find the role of : " + role);
-            return "Cannot find the role of : " + role;
-        }
-        for (String princ : princs) {
-            try {
-                kadmin.addPrincipal(princ + releam);
-                LOG.info("Success to add princ :" + princ + releam);
-                result = result + "Success to add princ :" + princ + releam + "\n";
-            } catch (KrbException e) {
-                LOG.info(e.getMessage());
-                result = e.getMessage() + "\n";
-            }
-        }
-        return result;
-    }
-
-    @Override
-    public File getKeytabByHostAndRole(String host, String role) throws HasException {
-        LocalKadmin kadmin;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
-        File path = new File("/tmp/" + System.currentTimeMillis());
-        path.mkdirs();
-        File keytab = new File(path, role + "-" + host + ".keytab");
-        if (keytab.exists()) {
-            keytab.delete();
-        }
-        String[] princs = HostRoleType.valueOf(role).getPrincs();
-        for (String princ : princs) {
-            try {
-                if (kadmin.getPrincipal(princ + realm) == null) {
-                    continue;
-                }
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-            try {
-                kadmin.exportKeytab(keytab, princ + realm);
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        }
-        return keytab;
-    }
-
-    public void getKeytabByHostAndRole(String host, String role, File keytab) throws HasException {
-        LocalKadmin kadmin;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
-        if (keytab.exists()) {
-            keytab.delete();
-        }
-        String[] princs = HostRoleType.valueOf(role).getPrincs();
-        for (String princ : princs) {
-            try {
-                if (kadmin.getPrincipal(princ + realm) == null) {
-                    continue;
-                }
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-            try {
-                kadmin.exportKeytab(keytab, princ + realm);
-                System.out.println("Success to export keytab : " + keytab.getAbsolutePath());
-            } catch (KrbException e) {
-                throw new HasException(e);
-            }
-        }
-    }
-
-    @Override
-    public List<String> getPrincipals() throws HasException {
-        LocalKadmin kadmin;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            return kadmin.getPrincipals();
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-    }
-
-    public KrbIdentity getPrincipal(String principalName) throws HasException {
-        LocalKadmin kadmin;
-        KrbIdentity identity;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            identity = kadmin.getPrincipal(principalName);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        return identity;
-    }
-
-    @Override
-    public void addPrincipal(String principal) throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        if (principal == null) {
-            throw new HasException("Value of principal is null.");
-        }
-        try {
-            kadmin.addPrincipal(principal);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        LOG.info("Success to add principal :" + principal);
-    }
-
-    @Override
-    public String getHadminPrincipal() {
-        return KrbUtil.makeKadminPrincipal(serverSetting.getKdcRealm()).getName();
-    }
-
-    /**
-     * get size of principal
-     */
-    @Override
-    public int size() throws HasException {
-        return this.getPrincipals().size();
-    }
-
-    @Override
-    public void setEnableOfConf(String isEnable) throws HasException {
-        File hasConf = new File(confDir, "has-server.conf");
-        if (!hasConf.exists()) {
-            System.err.println("has-server.conf is not exists.");
-            return;
-        }
-        try {
-            HasUtil.setEnableConf(hasConf, isEnable);
-        } catch (IOException e) {
-            System.err.println(e.getMessage());
-            return;
-        }
-    }
-
-    @Override
-    public void exportKeytab(File keytabFile, String principal)
-        throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            kadmin.exportKeytab(keytabFile, principal);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-    }
-
-    @Override
-    public void exportKeytab(File keytabFile, List<String> principals)
-            throws HasException {
-        LocalKadmin kadmin = null;
-        try {
-            kadmin = new LocalKadminImpl(serverSetting);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-        try {
-            kadmin.exportKeytab(keytabFile, principals);
-        } catch (KrbException e) {
-            throw new HasException(e);
-        }
-    }
-
-    public void getHostRoles() {
-        for (HostRoleType role : HostRoleType.values()) {
-            System.out.print("\tHostRole: " + role.getName()
-                    + ", PrincipalNames: ");
-            String[] princs = role.getPrincs();
-            for (int j = 0; j < princs.length; j++) {
-                System.out.print(princs[j]);
-                if (j == princs.length - 1) {
-                    System.out.println();
-                } else {
-                    System.out.print(", ");
-                }
-            }
-        }
-    }
-}
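
[Note, not part of the commit above: for orientation, a minimal sketch of driving the LocalHasAdmin class from a prepared conf dir, using only methods shown in this diff. The paths, principal name, and password are placeholders.]

import java.io.File;
import org.apache.hadoop.has.common.HasException;
import org.apache.hadoop.has.server.admin.LocalHasAdmin;
import org.apache.kerby.kerberos.kerb.KrbException;

public class LocalHasAdminExample {
    public static void main(String[] args) throws KrbException, HasException {
        // Placeholder conf dir; must contain the KDC and backend config files.
        LocalHasAdmin admin = new LocalHasAdmin(new File("/etc/has/conf"));
        admin.addPrincipal("test@EXAMPLE.COM", "test-password");
        admin.exportKeytab(new File("/tmp/test.keytab"), "test@EXAMPLE.COM");
        System.out.println("Known principals: " + admin.getPrincipals());
    }
}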

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
deleted file mode 100644
index f880c48..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
+++ /dev/null
@@ -1,315 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.kdc;
-
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.apache.kerby.kerberos.kerb.KrbCodec;
-import org.apache.kerby.kerberos.kerb.KrbErrorCode;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.client.KrbContext;
-import org.apache.kerby.kerberos.kerb.common.EncryptionUtil;
-import org.apache.kerby.kerberos.kerb.common.KrbUtil;
-import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
-import org.apache.kerby.kerberos.kerb.server.KdcContext;
-import org.apache.kerby.kerberos.kerb.server.KdcRecoverableException;
-import org.apache.kerby.kerberos.kerb.server.KdcServer;
-import org.apache.kerby.kerberos.kerb.server.preauth.PreauthHandler;
-import org.apache.kerby.kerberos.kerb.server.request.AsRequest;
-import org.apache.kerby.kerberos.kerb.server.request.KdcRequest;
-import org.apache.kerby.kerberos.kerb.type.KerberosTime;
-import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
-import org.apache.kerby.kerberos.kerb.type.base.HostAddress;
-import org.apache.kerby.kerberos.kerb.type.base.HostAddresses;
-import org.apache.kerby.kerberos.kerb.type.base.KrbError;
-import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
-import org.apache.kerby.kerberos.kerb.type.base.KrbToken;
-import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
-import org.apache.kerby.kerberos.kerb.type.base.TokenFormat;
-import org.apache.kerby.kerberos.kerb.type.kdc.AsReq;
-import org.apache.kerby.kerberos.kerb.type.kdc.KdcOption;
-import org.apache.kerby.kerberos.kerb.type.kdc.KdcOptions;
-import org.apache.kerby.kerberos.kerb.type.kdc.KdcReqBody;
-import org.apache.kerby.kerberos.kerb.type.pa.PaData;
-import org.apache.kerby.kerberos.kerb.type.pa.PaDataEntry;
-import org.apache.kerby.kerberos.kerb.type.pa.PaDataType;
-import org.apache.kerby.kerberos.kerb.type.pa.token.PaTokenRequest;
-import org.apache.kerby.kerberos.kerb.type.pa.token.TokenInfo;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class HasKdcHandler {
-    private static final Logger LOG = LoggerFactory.getLogger(HasKdcHandler.class);
-
-    private KdcContext kdcContext;
-    private KrbContext krbContext;
-    private KdcServer kdcServer;
-
-    /**
-     * Constructor with has server.
-     *
-     * @param hasServer has server
-     */
-    public HasKdcHandler(HasServer hasServer) {
-        this.krbContext = new KrbContext();
-        this.krbContext.init(hasServer.getKrbSetting());
-        this.kdcServer = hasServer.getKdcServer();
-        prepareHandler(kdcServer);
-    }
-
-    public KrbContext getKrbContext() {
-        return krbContext;
-    }
-
-    public KdcContext getKdcContext() {
-        return kdcContext;
-    }
-
-    private KdcServer getKdcServer() {
-        return kdcServer;
-    }
-
-    private void prepareHandler(KdcServer kdcServer) {
-        this.kdcContext = new KdcContext(kdcServer.getKdcSetting());
-        this.kdcContext.setIdentityService(kdcServer.getIdentityService());
-        PreauthHandler preauthHandler = new PreauthHandler();
-        preauthHandler.init();
-        this.kdcContext.setPreauthHandler(preauthHandler);
-    }
-
-    private String getAudience(String name) {
-        return name + "/" + getKdcContext().getKdcRealm() + "@" + getKdcContext().getKdcRealm();
-    }
-
-    public KrbMessage getResponse(AuthToken authToken, String passPhrase) {
-        KrbMessage krbMessage = null;
-        try {
-            krbMessage = handleMessage(authToken, passPhrase);
-        } catch (KrbException e) {
-            LOG.error("Failed to handle message. " + e.getMessage());
-        }
-        return krbMessage;
-    }
-
-    /**
-     * Process the client request message.
-     */
-    public KrbMessage handleMessage(AuthToken authToken, String passPhrase) throws KrbException {
-
-        // set the audiences
-        List<String> auds = new ArrayList<String>();
-        String audience = getAudience("krbtgt");
-        auds.add(audience);
-        authToken.setAudiences(auds);
-
-        AsReq asReq = createAsReq(authToken);
-        KdcRequest kdcRequest = new AsRequest(asReq, kdcContext);
-        kdcRequest.setHttps(true);
-        List<EncryptionType> requestedTypes = getEncryptionTypes();
-        EncryptionType bestType = EncryptionUtil.getBestEncryptionType(requestedTypes,
-                kdcContext.getConfig().getEncryptionTypes());
-
-        if (bestType == null) {
-            LOG.error("Can't get the best encryption type.");
-            throw new KrbException(KrbErrorCode.KDC_ERR_ETYPE_NOSUPP);
-        }
-
-        PrincipalName clientPrincipal = new PrincipalName(authToken.getSubject());
-        String clientRealm = asReq.getReqBody().getRealm();
-        if (clientRealm == null || clientRealm.isEmpty()) {
-            clientRealm = getKdcContext().getKdcRealm();
-        }
-        clientPrincipal.setRealm(clientRealm);
-
-        // Set the client key
-        EncryptionKey clientKey = HasUtil.getClientKey(clientPrincipal.getName(),
-            passPhrase, bestType);
-        kdcRequest.setClientKey(clientKey);
-
-        // Set the token issuers
-        getKdcServer().getKdcConfig().setString(KdcConfigKey.TOKEN_ISSUERS, "has");
-
-        KrbMessage krbResponse;
-
-        try {
-            kdcRequest.process();
-            krbResponse = kdcRequest.getReply();
-        } catch (KrbException e) {
-            LOG.error("Error occurred when requesting TGT. " + e.getMessage());
-            if (e instanceof KdcRecoverableException) {
-                krbResponse = handleRecoverableException(
-                        (KdcRecoverableException) e, kdcRequest);
-            } else {
-                KrbError krbError = new KrbError();
-                krbError.setStime(KerberosTime.now());
-                krbError.setSusec(100);
-                if (e.getKrbErrorCode() != null) {
-                    krbError.setErrorCode(e.getKrbErrorCode());
-                } else {
-                    krbError.setErrorCode(KrbErrorCode.UNKNOWN_ERR);
-                }
-                krbError.setCrealm(kdcContext.getKdcRealm());
-                if (kdcRequest.getClientPrincipal() != null) {
-                    krbError.setCname(kdcRequest.getClientPrincipal());
-                }
-                krbError.setRealm(kdcContext.getKdcRealm());
-                if (kdcRequest.getServerPrincipal() != null) {
-                    krbError.setSname(kdcRequest.getServerPrincipal());
-                } else {
-                    PrincipalName serverPrincipal = kdcRequest.getKdcReq().getReqBody().getSname();
-                    serverPrincipal.setRealm(kdcRequest.getKdcReq().getReqBody().getRealm());
-                    krbError.setSname(serverPrincipal);
-                }
-                if (KrbErrorCode.KRB_AP_ERR_BAD_INTEGRITY.equals(e.getKrbErrorCode())) {
-                    krbError.setEtext("PREAUTH_FAILED");
-                } else {
-                    krbError.setEtext(e.getMessage());
-                }
-                krbResponse = krbError;
-            }
-        }
-        return krbResponse;
-    }
-
-    /**
-     * Process the recoverable exception.
-     *
-     * @param e The exception returned by the KDC
-     * @param kdcRequest kdc request
-     * @return The KrbError
-     */
-    private KrbMessage handleRecoverableException(KdcRecoverableException e,
-                                                  KdcRequest kdcRequest)
-            throws KrbException {
-        LOG.info("KRB error occurred while processing request: "
-                + e.getMessage());
-
-        KrbError error = e.getKrbError();
-        error.setStime(KerberosTime.now());
-        error.setSusec(100);
-        error.setErrorCode(e.getKrbError().getErrorCode());
-        error.setRealm(kdcContext.getKdcRealm());
-        if (kdcRequest != null) {
-            error.setSname(kdcRequest.getKdcReq().getReqBody().getCname());
-        } else {
-            error.setSname(new PrincipalName("NONE"));
-        }
-        error.setEtext(e.getMessage());
-        return error;
-    }
-
-    public AsReq createAsReq(AuthToken authToken) throws KrbException {
-        AsReq asReq = new AsReq();
-        KdcReqBody body = makeReqBody();
-        asReq.setReqBody(body);
-
-        PaTokenRequest tokenPa = new PaTokenRequest();
-        KrbToken krbToken = new KrbToken(authToken, TokenFormat.JWT);
-        tokenPa.setToken(krbToken);
-        TokenInfo info = new TokenInfo();
-        info.setTokenVendor(authToken.getIssuer());
-        tokenPa.setTokenInfo(info);
-
-        PaDataEntry paDataEntry = new PaDataEntry();
-        paDataEntry.setPaDataType(PaDataType.TOKEN_REQUEST);
-        paDataEntry.setPaDataValue(KrbCodec.encode(tokenPa));
-
-        PaData paData = new PaData();
-        paData.addElement(paDataEntry);
-        asReq.setPaData(paData);
-        return asReq;
-    }
-
-    /**
-     * Create the KdcReqBody
-     *
-     * @return KdcReqBody
-     *
-     * @throws KrbException e
-     */
-    protected KdcReqBody makeReqBody() throws KrbException {
-        KdcReqBody body = new KdcReqBody();
-
-        long startTime = System.currentTimeMillis();
-        body.setFrom(new KerberosTime(startTime));
-
-        // set the client principal as null
-        PrincipalName cName = null;
-        body.setCname(cName);
-
-        body.setRealm(getKrbContext().getKrbSetting().getKdcRealm());
-
-        PrincipalName sName = getServerPrincipal();
-        body.setSname(sName);
-
-        body.setTill(new KerberosTime(startTime + krbContext.getTicketValidTime()));
-
-        int nonce = krbContext.generateNonce();
-        body.setNonce(nonce);
-//        setChosenNonce(nonce);
-
-        body.setKdcOptions(getKdcOptions());
-
-        HostAddresses addresses = getHostAddresses();
-        if (addresses != null) {
-            body.setAddresses(addresses);
-        }
-
-        body.setEtypes(getEncryptionTypes());
-
-        return body;
-    }
-
-    private PrincipalName getServerPrincipal() {
-        return KrbUtil.makeTgsPrincipal(getKrbContext().getKrbSetting().getKdcRealm());
-    }
-
-    private KdcOptions getKdcOptions() {
-        KdcOptions kdcOptions = new KdcOptions();
-        // By default enforce these flags
-        kdcOptions.setFlag(KdcOption.FORWARDABLE);
-        kdcOptions.setFlag(KdcOption.PROXIABLE);
-        kdcOptions.setFlag(KdcOption.RENEWABLE_OK);
-        return kdcOptions;
-    }
-
-    public HostAddresses getHostAddresses() {
-        List<HostAddress> hostAddresses = new ArrayList<HostAddress>();
-        HostAddresses addresses = null;
-        //empty
-        if (!hostAddresses.isEmpty()) {
-            addresses = new HostAddresses();
-            for (HostAddress ha : hostAddresses) {
-                addresses.addElement(ha);
-            }
-        }
-        return addresses;
-    }
-
-    public List<EncryptionType> getEncryptionTypes() {
-        List<EncryptionType> encryptionTypes = krbContext.getConfig().getEncryptionTypes();
-        return EncryptionUtil.orderEtypesByStrength(encryptionTypes);
-    }
-}
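
For context on the removed class: HasKdcHandler drives the embedded Kerby KDC directly, turning a JWT AuthToken plus a pass phrase into a regular AS exchange without a network round trip. A minimal sketch of the calling pattern, assuming a HasServer instance, an AuthToken and a pass phrase are already available (all three come from elsewhere in the HAS server and are not shown here):

    // Sketch only; hasServer, authToken and passPhrase are assumed to exist.
    HasKdcHandler handler = new HasKdcHandler(hasServer);
    // getResponse() logs and returns null if the underlying KDC request throws.
    KrbMessage reply = handler.getResponse(authToken, passPhrase);
    if (reply instanceof KrbError) {
        // Protocol-level failures are folded into a KrbError instead of an exception.
        System.err.println("AS exchange failed: " + ((KrbError) reply).getEtext());
    }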

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
deleted file mode 100644
index 3f397fb..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.kdc;
-
-import org.apache.kerby.config.ConfigKey;
-
-/**
- * Define all the MySQL backend related configuration items with default values.
- */
-public enum MySQLConfKey implements ConfigKey {
-    MYSQL_DRIVER("com.mysql.jdbc.Driver"),
-    MYSQL_URL("jdbc:mysql://127.0.0.1:3306/mysqlbackend"),
-    MYSQL_USER("root"),
-    MYSQL_PASSWORD("passwd");
-
-    private Object defaultValue;
-
-    MySQLConfKey() {
-        this.defaultValue = null;
-    }
-
-    MySQLConfKey(Object defaultValue) {
-        this.defaultValue = defaultValue;
-    }
-
-    @Override
-    public String getPropertyKey() {
-        return name().toLowerCase();
-    }
-
-    @Override
-    public Object getDefaultValue() {
-        return this.defaultValue;
-    }
-}
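
A note on the removed enum: getPropertyKey() lower-cases the constant name, so MYSQL_DRIVER, MYSQL_URL, MYSQL_USER and MYSQL_PASSWORD resolve to the properties "mysql_driver", "mysql_url", "mysql_user" and "mysql_password", each with the default shown above. A tiny illustrative check (not part of the project):

    // Prints e.g. "mysql_url = jdbc:mysql://127.0.0.1:3306/mysqlbackend".
    for (MySQLConfKey key : MySQLConfKey.values()) {
        System.out.println(key.getPropertyKey() + " = " + key.getDefaultValue());
    }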

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
deleted file mode 100644
index ec02312..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
+++ /dev/null
@@ -1,426 +0,0 @@
-/**
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- *
- */
-package org.apache.hadoop.has.server.kdc;
-
-import org.apache.commons.dbutils.DbUtils;
-import org.apache.directory.api.util.GeneralizedTime;
-import org.apache.kerby.config.Config;
-import org.apache.kerby.kerberos.kerb.KrbException;
-import org.apache.kerby.kerberos.kerb.identity.backend.AbstractIdentityBackend;
-import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
-import org.apache.kerby.kerberos.kerb.type.KerberosTime;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
-import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.ResultSet;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import javax.sql.rowset.serial.SerialBlob;
-import java.util.Map;
-import java.util.List;
-import java.util.ArrayList;
-import java.text.ParseException;
-
-/**
- * A MySQL based backend implementation.
- */
-public class MySQLIdentityBackend extends AbstractIdentityBackend {
-    private Connection connection;
-    private String driver;
-    private String url;
-    private String user;
-    private String password;
-    private static final Logger LOG = LoggerFactory.getLogger(MySQLIdentityBackend.class);
-    private String identityTable;
-    private String keyInfoTable;
-
-    /**
-     * Construct an instance using the specified config, which contains
-     * everything needed to initialize the MySQL backend.
-     * @param config The config used to configure the backend.
-     */
-    public MySQLIdentityBackend(final Config config) {
-        setConfig(config);
-    }
-
-    public MySQLIdentityBackend() { }
-
-    /**
-     * Start the MySQL connection.
-     */
-    private void startConnection() throws KrbException {
-        try {
-            Class.forName(driver);
-            connection = DriverManager.getConnection(url, user, password);
-            if (!connection.isClosed()) {
-                LOG.info("Succeeded in connecting to MySQL.");
-            }
-        } catch (ClassNotFoundException e) {
-            throw new KrbException("JDBC Driver Class not found. ", e);
-        } catch (SQLException e) {
-            throw new KrbException("Failed to connect to MySQL. ", e);
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected void doInitialize() throws KrbException {
-        LOG.info("Initializing the MySQL identity backend.");
-        driver = getConfig().getString(MySQLConfKey.MYSQL_DRIVER, true);
-        user = getConfig().getString(MySQLConfKey.MYSQL_USER, true);
-        password = getConfig().getString(MySQLConfKey.MYSQL_PASSWORD, true);
-
-        String urlString = getConfig().getString(MySQLConfKey.MYSQL_URL, true);
-        if (urlString == null || urlString.isEmpty()) {
-            urlString = getBackendConfig().getString(MySQLConfKey.MYSQL_URL, true);
-        }
-        url = urlString;
-
-        PreparedStatement preInitialize = null;
-        PreparedStatement preKdcRealm = null;
-        ResultSet resKdcRealm = null;
-        PreparedStatement preIdentity = null;
-        PreparedStatement preKey = null;
-        try {
-            startConnection();
-
-            // Set initialized for kdc config
-            String stmInitialize = "UPDATE `kdc_config` SET initialized = true WHERE id = 1";
-            preInitialize = connection.prepareStatement(stmInitialize);
-            preInitialize.executeUpdate();
-
-            // Get identity table name according to realm of kdc
-            String stmKdcRealm = "SELECT realm FROM `kdc_config`";
-            preKdcRealm = connection.prepareStatement(stmKdcRealm);
-            resKdcRealm = preKdcRealm.executeQuery();
-            if (resKdcRealm.next()) {
-                String realm = resKdcRealm.getString("realm").toLowerCase();
-                identityTable = "`" + realm + "_identity" + "`";
-                keyInfoTable = "`" + realm + "_key" + "`";
-            } else {
-                throw new KrbException("Failed to get kdc config.");
-            }
-
-            // Create identity table
-            String stmIdentity = "CREATE TABLE IF NOT EXISTS " + identityTable
-                + " (principal varchar(255) NOT NULL, key_version INTEGER "
-                + "DEFAULT 1, kdc_flags INTEGER DEFAULT 0, disabled bool "
-                + "DEFAULT NULL, locked bool DEFAULT NULL, expire_time "
-                + "VARCHAR(255) DEFAULT NULL, created_time VARCHAR(255) "
-                + "DEFAULT NULL, PRIMARY KEY (principal) ) ENGINE=INNODB;";
-            preIdentity = connection.prepareStatement(stmIdentity);
-            preIdentity.executeUpdate();
-
-            // Create key table
-            String stmKey = "CREATE TABLE IF NOT EXISTS " + keyInfoTable
-                + " (key_id INTEGER NOT NULL AUTO_INCREMENT, key_type "
-                + "VARCHAR(255) DEFAULT NULL, kvno INTEGER DEFAULT -1, "
-                + "key_value BLOB DEFAULT NULL, principal VARCHAR(255) NOT NULL,"
-                + "PRIMARY KEY (key_id), INDEX (principal), FOREIGN KEY "
-                + "(principal) REFERENCES " + identityTable + "(principal) "
-                + ") ENGINE=INNODB;";
-            preKey = connection.prepareStatement(stmKey);
-            preKey.executeUpdate();
-
-        } catch (SQLException e) {
-            LOG.error("Error occurred while initializing the MySQL backend. " + e.toString());
-            throw new KrbException("Failed to create table in database. ", e);
-        } finally {
-            DbUtils.closeQuietly(preInitialize);
-            DbUtils.closeQuietly(preKdcRealm);
-            DbUtils.closeQuietly(resKdcRealm);
-            DbUtils.closeQuietly(preIdentity);
-            DbUtils.closeQuietly(preKey);
-            DbUtils.closeQuietly(connection);
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected void doStop() throws KrbException {
-        try {
-            closeConnection();
-            if (connection.isClosed()) {
-                LOG.info("Succeeded in closing connection with MySQL.");
-            }
-        } catch (SQLException e) {
-            LOG.error("Failed to close connection with MySQL.");
-            throw new KrbException("Failed to close connection with MySQL. ", e);
-        }
-    }
-
-    /**
-     * Close the connection for stop().
-     * @throws SQLException if an error occurs while closing the connection
-     */
-    private void closeConnection() throws SQLException {
-        if (!connection.isClosed()) {
-            connection.close();
-        }
-    }
-
-    /**
-     * Convert a KerberosTime type object to a generalized time form of String.
-     * @param kerberosTime The kerberos time to convert
-     */
-    private String toGeneralizedTime(final KerberosTime kerberosTime) {
-        GeneralizedTime generalizedTime = new GeneralizedTime(kerberosTime.getValue());
-        return generalizedTime.toString();
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected KrbIdentity doAddIdentity(KrbIdentity identity) throws KrbException {
-        String principalName = identity.getPrincipalName();
-        int keyVersion = identity.getKeyVersion();
-        int kdcFlags = identity.getKdcFlags();
-        boolean disabled = identity.isDisabled();
-        boolean locked = identity.isLocked();
-        String createdTime = toGeneralizedTime(identity.getCreatedTime());
-        String expireTime = toGeneralizedTime(identity.getExpireTime());
-        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
-
-        PreparedStatement preIdentity = null;
-        PreparedStatement preKey = null;
-
-        KrbIdentity duplicateIdentity = doGetIdentity(principalName);
-        if (duplicateIdentity != null) {
-            LOG.warn("The identity may be a duplicate.");
-
-            return duplicateIdentity;
-        } else {
-            try {
-                startConnection();
-                connection.setAutoCommit(false);
-
-                // Insert identity to identity table
-                String stmIdentity = "insert into " + identityTable + " values(?, ?, ?, ?, ?, ?, ?)";
-                preIdentity = connection.prepareStatement(stmIdentity);
-                preIdentity.setString(1, principalName);
-                preIdentity.setInt(2, keyVersion);
-                preIdentity.setInt(3, kdcFlags);
-                preIdentity.setBoolean(4, disabled);
-                preIdentity.setBoolean(5, locked);
-                preIdentity.setString(6, createdTime);
-                preIdentity.setString(7, expireTime);
-                preIdentity.executeUpdate();
-
-                // Insert keys to key table
-                for (Map.Entry<EncryptionType, EncryptionKey> entry : keys.entrySet()) {
-                    String stmKey = "insert into " + keyInfoTable + " (key_type, kvno, key_value, principal)"
-                        + " values(?, ?, ?, ?)";
-                    preKey = connection.prepareStatement(stmKey);
-                    preKey.setString(1, entry.getKey().getName());
-                    preKey.setInt(2, entry.getValue().getKvno());
-                    preKey.setBlob(3, new SerialBlob(entry.getValue().getKeyData()));
-                    preKey.setString(4, principalName);
-                    preKey.executeUpdate();
-                }
-
-                connection.commit();
-                return identity;
-            } catch (SQLException e) {
-                try {
-                    LOG.info("Transaction is being rolled back.");
-                    connection.rollback();
-                } catch (SQLException ex) {
-                    throw new KrbException("Transaction roll back failed. ", ex);
-                }
-                LOG.error("Error occurred while adding identity.");
-                throw new KrbException("Failed to add identity. ", e);
-            } finally {
-                DbUtils.closeQuietly(preIdentity);
-                DbUtils.closeQuietly(preKey);
-                doStop();
-            }
-        }
-    }
-
-    /**
-     * Create kerberos time.
-     * @param generalizedTime generalized time
-     * @throws ParseException parse exception
-     */
-    private KerberosTime createKerberosTime(final String generalizedTime) throws ParseException {
-        long time = new GeneralizedTime(generalizedTime).getTime();
-        return new KerberosTime(time);
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected KrbIdentity doGetIdentity(final String principalName) throws KrbException {
-        KrbIdentity krbIdentity = new KrbIdentity(principalName);
-
-        PreparedStatement preIdentity = null;
-        ResultSet resIdentity = null;
-        PreparedStatement preKey = null;
-        ResultSet resKey = null;
-        try {
-            startConnection();
-
-            // Get identity from identity table
-            String stmIdentity = "SELECT * FROM " + identityTable + " where principal = ?";
-            preIdentity = connection.prepareStatement(stmIdentity);
-            preIdentity.setString(1, principalName);
-            resIdentity = preIdentity.executeQuery();
-
-            if (!resIdentity.next()) {
-                return null;
-            }
-
-            // The principal is the primary key, so at most one row matches;
-            // read the fields from the row already positioned by next() above.
-            krbIdentity.setKeyVersion(resIdentity.getInt("key_version"));
-            krbIdentity.setKdcFlags(resIdentity.getInt("kdc_flags"));
-            krbIdentity.setDisabled(resIdentity.getBoolean("disabled"));
-            krbIdentity.setLocked(resIdentity.getBoolean("locked"));
-            krbIdentity.setCreatedTime(createKerberosTime(resIdentity.getString("created_time")));
-            krbIdentity.setExpireTime(createKerberosTime(resIdentity.getString("expire_time")));
-
-            // Get keys from key table
-            List<EncryptionKey> keys = new ArrayList<>();
-            String stmKey = "SELECT * FROM " + keyInfoTable + " where principal = ?";
-            preKey = connection.prepareStatement(stmKey);
-            preKey.setString(1, principalName);
-            resKey = preKey.executeQuery();
-            while (resKey.next()) {
-                int kvno = resKey.getInt("kvno");
-                String keyType = resKey.getString("key_type");
-                EncryptionType eType = EncryptionType.fromName(keyType);
-                byte[] keyValue = resKey.getBytes("key_value");
-                EncryptionKey key = new EncryptionKey(eType, keyValue, kvno);
-                keys.add(key);
-            }
-
-            krbIdentity.addKeys(keys);
-            return krbIdentity;
-        } catch (SQLException e) {
-            LOG.error("Error occurred while getting identity.");
-            throw new KrbException("Failed to get identity. ", e);
-        } catch (ParseException e) {
-            throw new KrbException("Failed to get identity. ", e);
-        } finally {
-            DbUtils.closeQuietly(preIdentity);
-            DbUtils.closeQuietly(resIdentity);
-            DbUtils.closeQuietly(preKey);
-            DbUtils.closeQuietly(resKey);
-            doStop();
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected KrbIdentity doUpdateIdentity(KrbIdentity identity) throws KrbException {
-        String principalName = identity.getPrincipalName();
-        try {
-            doDeleteIdentity(principalName); // Delete former identity
-            doAddIdentity(identity); // Insert new identity
-        } catch (KrbException e) {
-            LOG.error("Error occurred while updating identity: " + principalName);
-            throw new KrbException("Failed to update identity. ", e);
-        }
-
-        return getIdentity(principalName);
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected void doDeleteIdentity(String principalName) throws KrbException {
-        PreparedStatement preKey = null;
-        PreparedStatement preIdentity = null;
-        try {
-            startConnection();
-            connection.setAutoCommit(false);
-
-            // Delete keys from key table
-            String stmKey = "DELETE FROM " + keyInfoTable + " where principal = ?";
-            preKey = connection.prepareStatement(stmKey);
-            preKey.setString(1, principalName);
-            preKey.executeUpdate();
-
-            // Delete identity from identity table
-            String stmIdentity = "DELETE FROM " + identityTable + " where principal = ? ";
-            preIdentity = connection.prepareStatement(stmIdentity);
-            preIdentity.setString(1, principalName);
-            preIdentity.executeUpdate();
-
-            connection.commit();
-        } catch (SQLException e) {
-            try {
-                LOG.info("Transaction is being rolled back.");
-                connection.rollback();
-            } catch (SQLException ex) {
-                throw new KrbException("Transaction roll back failed. ", ex);
-            }
-            LOG.error("Error occurred while deleting identity.");
-            throw new KrbException("Failed to delete identity. ", e);
-        } finally {
-            DbUtils.closeQuietly(preIdentity);
-            DbUtils.closeQuietly(preKey);
-            doStop();
-        }
-    }
-
-    /**
-     * {@inheritDoc}
-     */
-    @Override
-    protected Iterable<String> doGetIdentities() throws KrbException {
-        List<String> identityNames = new ArrayList<>();
-        PreparedStatement preSmt = null;
-        ResultSet result = null;
-        try {
-            startConnection();
-            String statement = "SELECT * FROM " + identityTable;
-            preSmt = connection.prepareStatement(statement);
-            result = preSmt.executeQuery();
-            while (result.next()) {
-                identityNames.add(result.getString("principal"));
-            }
-            result.close();
-            preSmt.close();
-        } catch (SQLException e) {
-            LOG.error("Error occurred while getting identities.");
-            throw new KrbException("Failed to get identities. ", e);
-        } finally {
-            DbUtils.closeQuietly(preSmt);
-            DbUtils.closeQuietly(result);
-            doStop();
-        }
-
-        return identityNames;
-    }
-}
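
The removed backend keeps one identity table and one key table per realm (named <realm>_identity and <realm>_key, created on initialize) and opens a fresh JDBC connection for every operation. A rough usage sketch; how the mysql_* properties reach the backend config here is an assumption for illustration, since the real HAS server assembles the backend config from its own configuration files:

    // Illustrative only. Property names come from MySQLConfKey; the values and
    // the use of Conf.setString(...) are assumptions, not code from this commit.
    Conf backendConfig = new Conf();
    backendConfig.setString(MySQLConfKey.MYSQL_DRIVER.getPropertyKey(), "com.mysql.jdbc.Driver");
    backendConfig.setString(MySQLConfKey.MYSQL_URL.getPropertyKey(), "jdbc:mysql://127.0.0.1:3306/mysqlbackend");
    backendConfig.setString(MySQLConfKey.MYSQL_USER.getPropertyKey(), "root");
    backendConfig.setString(MySQLConfKey.MYSQL_PASSWORD.getPropertyKey(), "passwd");

    MySQLIdentityBackend backend = new MySQLIdentityBackend(backendConfig);
    backend.initialize();                                    // creates the per-realm tables
    backend.addIdentity(new KrbIdentity("alice@EXAMPLE.COM"));
    KrbIdentity stored = backend.getIdentity("alice@EXAMPLE.COM");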

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/a8b1c28f/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
deleted file mode 100644
index 78ce1e9..0000000
--- a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package org.apache.hadoop.has.server.web;
-
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-import org.apache.hadoop.has.common.HasConfig;
-import org.apache.hadoop.has.common.HasException;
-import org.apache.hadoop.has.common.util.HasUtil;
-import org.apache.hadoop.has.server.HasServer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import java.io.File;
-import java.io.IOException;
-@Private
-@Unstable
-public class ConfFilter implements Filter {
-    public static final Logger LOG = LoggerFactory.getLogger(ConfFilter.class);
-    @Override
-    public void init(FilterConfig filterConfig) throws ServletException {
-
-    }
-
-    @Override
-    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse,
-                         FilterChain filterChain) throws IOException, ServletException {
-
-        final HasServer hasServer = WebServer.getHasServerFromContext(
-                servletRequest.getServletContext());
-        HasConfig hasConfig;
-        try {
-            hasConfig = HasUtil.getHasConfig(
-                    new File(hasServer.getConfDir(), "has-server.conf"));
-            String isEnableConf = hasConfig.getEnableConf();
-            if (!"true".equals(isEnableConf)) {
-                throw new RuntimeException("The KDC has already started.");
-            }
-            filterChain.doFilter(servletRequest, servletResponse);
-        } catch (HasException e) {
-            LOG.error(e.getMessage());
-        }
-    }
-
-    @Override
-    public void destroy() {
-
-    }
-}
\ No newline at end of file
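
ConfFilter above simply gates configuration requests on the enable_conf flag read from has-server.conf, rejecting them once the KDC is up. For readers unfamiliar with servlet filters, a generic registration sketch; the filter name and URL pattern below are assumptions for illustration, as the actual wiring lives in the HAS WebServer class:

    // Illustrative Servlet 3.x registration; not the project's own wiring.
    FilterRegistration.Dynamic reg = servletContext.addFilter("confFilter", ConfFilter.class);
    reg.addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), true, "/has/v1/conf/*");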