Posted to commits@directory.apache.org by pl...@apache.org on 2017/11/15 05:12:10 UTC

[05/10] directory-kerby git commit: Add the HAS project to Kerby.

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
new file mode 100644
index 0000000..589e092
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/admin/LocalHasAdmin.java
@@ -0,0 +1,382 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.admin;
+
+import org.apache.hadoop.has.common.HasAdmin;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.hadoop.has.server.web.HostRoleType;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadmin;
+import org.apache.kerby.kerberos.kerb.admin.kadmin.local.LocalKadminImpl;
+import org.apache.kerby.kerberos.kerb.common.KrbUtil;
+import org.apache.kerby.kerberos.kerb.identity.KrbIdentity;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcSetting;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+import org.apache.kerby.kerberos.kerb.server.ServerSetting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+
+public class LocalHasAdmin implements HasAdmin {
+    public static final Logger LOG = LoggerFactory.getLogger(LocalHasAdmin.class);
+
+    private final ServerSetting serverSetting;
+    private File confDir;
+
+    public LocalHasAdmin(HasServer hasServer) throws KrbException {
+        if (hasServer.getKdcServer() == null) {
+            throw new RuntimeException("Could not get HAS KDC server, please start KDC first.");
+        }
+        this.serverSetting = hasServer.getKdcServer().getKdcSetting();
+    }
+
+    /**
+     * Construct with prepared conf dir.
+     *
+     * @param confDir The path of conf dir
+     * @throws KrbException e
+     */
+    public LocalHasAdmin(File confDir) throws KrbException {
+        this.confDir = confDir;
+        KdcConfig tmpKdcConfig = KdcUtil.getKdcConfig(confDir);
+        if (tmpKdcConfig == null) {
+            tmpKdcConfig = new KdcConfig();
+        }
+
+        BackendConfig tmpBackendConfig = KdcUtil.getBackendConfig(confDir);
+        if (tmpBackendConfig == null) {
+            tmpBackendConfig = new BackendConfig();
+        }
+
+        this.serverSetting = new KdcSetting(tmpKdcConfig, tmpBackendConfig);
+    }
+
+    @Override
+    public List<String> getPrincipals(String exp) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        List<String> princs = null;
+        LOG.info("The value of exp is : " + exp);
+        if (exp == null || exp.equals("")) {
+            try {
+                princs = kadmin.getPrincipals();
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        } else {
+            try {
+                princs = kadmin.getPrincipals(exp);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        return princs;
+    }
+
+    @Override
+    public void addPrincipal(String principal, String password) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new HasException("Value of principal is null.");
+        }
+        if (password == null || password.isEmpty()) {
+            try {
+                kadmin.addPrincipal(principal);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        } else {
+            try {
+                kadmin.addPrincipal(principal, password);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        LOG.info("Success to add principal :" + principal);
+    }
+
+    @Override
+    public void deletePrincipal(String principal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new HasException("Value of principal is null.");
+        }
+        try {
+            kadmin.deletePrincipal(principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to delete principal :" + principal);
+    }
+
+    @Override
+    public void renamePrincipal(String oldPrincipal, String newPrincipal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.renamePrincipal(oldPrincipal, newPrincipal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to rename principal : \"" + oldPrincipal
+                + "\" to \"" + newPrincipal + "\".");
+    }
+
+    @Override
+    public String addPrincByRole(String host, String role) throws HasException {
+        String result = "";
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        // Suffix of the form "/host@REALM" appended to each role principal.
+        String princSuffix = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        if (princs == null) {
+            LOG.error("Cannot find the role: " + role);
+            return "Cannot find the role: " + role;
+        }
+        for (String princ : princs) {
+            try {
+                kadmin.addPrincipal(princ + princSuffix);
+                LOG.info("Successfully added principal: " + princ + princSuffix);
+                result = result + "Successfully added principal: " + princ + princSuffix + "\n";
+            } catch (KrbException e) {
+                LOG.info(e.getMessage());
+                result = result + e.getMessage() + "\n";
+            }
+        }
+        return result;
+    }
+
+    @Override
+    public File getKeytabByHostAndRole(String host, String role) throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        File path = new File("/tmp/" + System.currentTimeMillis());
+        path.mkdirs();
+        File keytab = new File(path, role + "-" + host + ".keytab");
+        if (keytab.exists()) {
+            keytab.delete();
+        }
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        for (String princ : princs) {
+            try {
+                if (kadmin.getPrincipal(princ + princSuffix) == null) {
+                    continue;
+                }
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+            try {
+                kadmin.exportKeytab(keytab, princ + princSuffix);
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+        return keytab;
+    }
+
+    public void getKeytabByHostAndRole(String host, String role, File keytab) throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        String realm = "/" + host + "@" + kadmin.getKdcConfig().getKdcRealm();
+        if (keytab.exists()) {
+            keytab.delete();
+        }
+        String[] princs = HostRoleType.valueOf(role).getPrincs();
+        for (String princ : princs) {
+            try {
+                if (kadmin.getPrincipal(princ + princSuffix) == null) {
+                    continue;
+                }
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+            try {
+                kadmin.exportKeytab(keytab, princ + princSuffix);
+                System.out.println("Successfully exported keytab: " + keytab.getAbsolutePath());
+            } catch (KrbException e) {
+                throw new HasException(e);
+            }
+        }
+    }
+
+    @Override
+    public List<String> getPrincipals() throws HasException {
+        LocalKadmin kadmin;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            return kadmin.getPrincipals();
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    public KrbIdentity getPrincipal(String principalName) throws HasException {
+        LocalKadmin kadmin;
+        KrbIdentity identity;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            identity = kadmin.getPrincipal(principalName);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        return identity;
+    }
+
+    @Override
+    public void addPrincipal(String principal) throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        if (principal == null) {
+            throw new HasException("Value of principal is null.");
+        }
+        try {
+            kadmin.addPrincipal(principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        LOG.info("Success to add principal :" + principal);
+    }
+
+    @Override
+    public String getHadminPrincipal() {
+        return KrbUtil.makeKadminPrincipal(serverSetting.getKdcRealm()).getName();
+    }
+
+    /**
+     * Get the number of principals.
+     */
+    @Override
+    public int size() throws HasException {
+        return this.getPrincipals().size();
+    }
+
+    @Override
+    public void setEnableOfConf(String isEnable) throws HasException {
+        File hasConf = new File(confDir, "has-server.conf");
+        if (!hasConf.exists()) {
+            System.err.println("has-server.conf is not exists.");
+            return;
+        }
+        try {
+            HasUtil.setEnableConf(hasConf, isEnable);
+        } catch (IOException e) {
+            throw new HasException(e.getMessage());
+        }
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, String principal)
+        throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.exportKeytab(keytabFile, principal);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    @Override
+    public void exportKeytab(File keytabFile, List<String> principals)
+            throws HasException {
+        LocalKadmin kadmin = null;
+        try {
+            kadmin = new LocalKadminImpl(serverSetting);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+        try {
+            kadmin.exportKeytab(keytabFile, principals);
+        } catch (KrbException e) {
+            throw new HasException(e);
+        }
+    }
+
+    public void getHostRoles() {
+        for (HostRoleType role : HostRoleType.values()) {
+            System.out.print("\tHostRole: " + role.getName()
+                    + ", PrincipalNames: ");
+            String[] princs = role.getPrincs();
+            for (int j = 0; j < princs.length; j++) {
+                System.out.print(princs[j]);
+                if (j == princs.length - 1) {
+                    System.out.println();
+                } else {
+                    System.out.print(", ");
+                }
+            }
+        }
+    }
+}
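
For context, a minimal usage sketch of the admin API above; the conf dir
path and principal names are placeholders, not part of this patch:

    import java.io.File;
    import org.apache.hadoop.has.common.HasException;
    import org.apache.hadoop.has.server.admin.LocalHasAdmin;
    import org.apache.kerby.kerberos.kerb.KrbException;

    public class LocalHasAdminExample {
        public static void main(String[] args) throws KrbException, HasException {
            // Assumes kdc.conf and backend.conf are prepared under this dir.
            LocalHasAdmin admin = new LocalHasAdmin(new File("/etc/has"));
            admin.addPrincipal("test/host1@EXAMPLE.COM", "secret");
            for (String princ : admin.getPrincipals()) {
                System.out.println(princ);
            }
            admin.exportKeytab(new File("/tmp/test.keytab"), "test/host1@EXAMPLE.COM");
        }
    }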

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
new file mode 100644
index 0000000..f880c48
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/HasKdcHandler.java
@@ -0,0 +1,315 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.kdc;
+
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.kerby.kerberos.kerb.KrbCodec;
+import org.apache.kerby.kerberos.kerb.KrbErrorCode;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.client.KrbContext;
+import org.apache.kerby.kerberos.kerb.common.EncryptionUtil;
+import org.apache.kerby.kerberos.kerb.common.KrbUtil;
+import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
+import org.apache.kerby.kerberos.kerb.server.KdcContext;
+import org.apache.kerby.kerberos.kerb.server.KdcRecoverableException;
+import org.apache.kerby.kerberos.kerb.server.KdcServer;
+import org.apache.kerby.kerberos.kerb.server.preauth.PreauthHandler;
+import org.apache.kerby.kerberos.kerb.server.request.AsRequest;
+import org.apache.kerby.kerberos.kerb.server.request.KdcRequest;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.apache.kerby.kerberos.kerb.type.base.HostAddress;
+import org.apache.kerby.kerberos.kerb.type.base.HostAddresses;
+import org.apache.kerby.kerberos.kerb.type.base.KrbError;
+import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;
+import org.apache.kerby.kerberos.kerb.type.base.KrbToken;
+import org.apache.kerby.kerberos.kerb.type.base.PrincipalName;
+import org.apache.kerby.kerberos.kerb.type.base.TokenFormat;
+import org.apache.kerby.kerberos.kerb.type.kdc.AsReq;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcOption;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcOptions;
+import org.apache.kerby.kerberos.kerb.type.kdc.KdcReqBody;
+import org.apache.kerby.kerberos.kerb.type.pa.PaData;
+import org.apache.kerby.kerberos.kerb.type.pa.PaDataEntry;
+import org.apache.kerby.kerberos.kerb.type.pa.PaDataType;
+import org.apache.kerby.kerberos.kerb.type.pa.token.PaTokenRequest;
+import org.apache.kerby.kerberos.kerb.type.pa.token.TokenInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class HasKdcHandler {
+    private static final Logger LOG = LoggerFactory.getLogger(HasKdcHandler.class);
+
+    private KdcContext kdcContext;
+    private KrbContext krbContext;
+    private KdcServer kdcServer;
+
+    /**
+     * Constructor with has server.
+     *
+     * @param hasServer has server
+     */
+    public HasKdcHandler(HasServer hasServer) {
+        this.krbContext = new KrbContext();
+        this.krbContext.init(hasServer.getKrbSetting());
+        this.kdcServer = hasServer.getKdcServer();
+        prepareHandler(kdcServer);
+    }
+
+    public KrbContext getKrbContext() {
+        return krbContext;
+    }
+
+    public KdcContext getKdcContext() {
+        return kdcContext;
+    }
+
+    private KdcServer getKdcServer() {
+        return kdcServer;
+    }
+
+    private void prepareHandler(KdcServer kdcServer) {
+        this.kdcContext = new KdcContext(kdcServer.getKdcSetting());
+        this.kdcContext.setIdentityService(kdcServer.getIdentityService());
+        PreauthHandler preauthHandler = new PreauthHandler();
+        preauthHandler.init();
+        this.kdcContext.setPreauthHandler(preauthHandler);
+    }
+
+    private String getAudience(String name) {
+        return name + "/" + getKdcContext().getKdcRealm() + "@" + getKdcContext().getKdcRealm();
+    }
+
+    public KrbMessage getResponse(AuthToken authToken, String passPhrase) {
+        KrbMessage krbMessage = null;
+        try {
+            krbMessage = handleMessage(authToken, passPhrase);
+        } catch (KrbException e) {
+            LOG.error("Failed to handle message. " + e.getMessage());
+        }
+        return krbMessage;
+    }
+
+    /**
+     * Process the client request message.
+     */
+    public KrbMessage handleMessage(AuthToken authToken, String passPhrase) throws KrbException {
+
+        // set the audiences
+        List<String> auds = new ArrayList<String>();
+        String audience = getAudience("krbtgt");
+        auds.add(audience);
+        authToken.setAudiences(auds);
+
+        AsReq asReq = createAsReq(authToken);
+        KdcRequest kdcRequest = new AsRequest(asReq, kdcContext);
+        kdcRequest.setHttps(true);
+        List<EncryptionType> requestedTypes = getEncryptionTypes();
+        EncryptionType bestType = EncryptionUtil.getBestEncryptionType(requestedTypes,
+                kdcContext.getConfig().getEncryptionTypes());
+
+        if (bestType == null) {
+            LOG.error("Can't get the best encryption type.");
+            throw new KrbException(KrbErrorCode.KDC_ERR_ETYPE_NOSUPP);
+        }
+
+        PrincipalName clientPrincipal = new PrincipalName(authToken.getSubject());
+        String clientRealm = asReq.getReqBody().getRealm();
+        if (clientRealm == null || clientRealm.isEmpty()) {
+            clientRealm = getKdcContext().getKdcRealm();
+        }
+        clientPrincipal.setRealm(clientRealm);
+
+        // Set the client key
+        EncryptionKey clientKey = HasUtil.getClientKey(clientPrincipal.getName(),
+            passPhrase, bestType);
+        kdcRequest.setClientKey(clientKey);
+
+        // Set the token issuers
+        getKdcServer().getKdcConfig().setString(KdcConfigKey.TOKEN_ISSUERS, "has");
+
+        KrbMessage krbResponse;
+
+        try {
+            kdcRequest.process();
+            krbResponse = kdcRequest.getReply();
+        } catch (KrbException e) {
+            LOG.error("Error occurred when request tgt. " + e.getMessage());
+            if (e instanceof KdcRecoverableException) {
+                krbResponse = handleRecoverableException(
+                        (KdcRecoverableException) e, kdcRequest);
+            } else {
+                KrbError krbError = new KrbError();
+                krbError.setStime(KerberosTime.now());
+                krbError.setSusec(100);
+                if (e.getKrbErrorCode() != null) {
+                    krbError.setErrorCode(e.getKrbErrorCode());
+                } else {
+                    krbError.setErrorCode(KrbErrorCode.UNKNOWN_ERR);
+                }
+                krbError.setCrealm(kdcContext.getKdcRealm());
+                if (kdcRequest.getClientPrincipal() != null) {
+                    krbError.setCname(kdcRequest.getClientPrincipal());
+                }
+                krbError.setRealm(kdcContext.getKdcRealm());
+                if (kdcRequest.getServerPrincipal() != null) {
+                    krbError.setSname(kdcRequest.getServerPrincipal());
+                } else {
+                    PrincipalName serverPrincipal = kdcRequest.getKdcReq().getReqBody().getSname();
+                    serverPrincipal.setRealm(kdcRequest.getKdcReq().getReqBody().getRealm());
+                    krbError.setSname(serverPrincipal);
+                }
+                if (KrbErrorCode.KRB_AP_ERR_BAD_INTEGRITY.equals(e.getKrbErrorCode())) {
+                    krbError.setEtext("PREAUTH_FAILED");
+                } else {
+                    krbError.setEtext(e.getMessage());
+                }
+                krbResponse = krbError;
+            }
+        }
+        return krbResponse;
+    }
+
+    /**
+     * Process the recoverable exception.
+     *
+     * @param e The exception returned by the KDC
+     * @param kdcRequest kdc request
+     * @return The KrbError
+     */
+    private KrbMessage handleRecoverableException(KdcRecoverableException e,
+                                                  KdcRequest kdcRequest)
+            throws KrbException {
+        LOG.info("KRB error occurred while processing request:"
+                + e.getMessage());
+
+        KrbError error = e.getKrbError();
+        error.setStime(KerberosTime.now());
+        error.setSusec(100);
+        error.setErrorCode(e.getKrbError().getErrorCode());
+        error.setRealm(kdcContext.getKdcRealm());
+        if (kdcRequest != null) {
+            error.setSname(kdcRequest.getKdcReq().getReqBody().getCname());
+        } else {
+            error.setSname(new PrincipalName("NONE"));
+        }
+        error.setEtext(e.getMessage());
+        return error;
+    }
+
+    public AsReq createAsReq(AuthToken authToken) throws KrbException {
+        AsReq asReq = new AsReq();
+        KdcReqBody body = makeReqBody();
+        asReq.setReqBody(body);
+
+        PaTokenRequest tokenPa = new PaTokenRequest();
+        KrbToken krbToken = new KrbToken(authToken, TokenFormat.JWT);
+        tokenPa.setToken(krbToken);
+        TokenInfo info = new TokenInfo();
+        info.setTokenVendor(authToken.getIssuer());
+        tokenPa.setTokenInfo(info);
+
+        PaDataEntry paDataEntry = new PaDataEntry();
+        paDataEntry.setPaDataType(PaDataType.TOKEN_REQUEST);
+        paDataEntry.setPaDataValue(KrbCodec.encode(tokenPa));
+
+        PaData paData = new PaData();
+        paData.addElement(paDataEntry);
+        asReq.setPaData(paData);
+        return asReq;
+    }
+
+    /**
+     * Create the KdcReqBody
+     *
+     * @return KdcReqBody
+     *
+     * @throws KrbException e
+     */
+    protected KdcReqBody makeReqBody() throws KrbException {
+        KdcReqBody body = new KdcReqBody();
+
+        long startTime = System.currentTimeMillis();
+        body.setFrom(new KerberosTime(startTime));
+
+        // Set the client principal to null.
+        PrincipalName cName = null;
+        body.setCname(cName);
+
+        body.setRealm(getKrbContext().getKrbSetting().getKdcRealm());
+
+        PrincipalName sName = getServerPrincipal();
+        body.setSname(sName);
+
+        body.setTill(new KerberosTime(startTime + krbContext.getTicketValidTime()));
+
+        int nonce = krbContext.generateNonce();
+        body.setNonce(nonce);
+
+        body.setKdcOptions(getKdcOptions());
+
+        HostAddresses addresses = getHostAddresses();
+        if (addresses != null) {
+            body.setAddresses(addresses);
+        }
+
+        body.setEtypes(getEncryptionTypes());
+
+        return body;
+    }
+
+    private PrincipalName getServerPrincipal() {
+        return KrbUtil.makeTgsPrincipal(getKrbContext().getKrbSetting().getKdcRealm());
+    }
+
+    private KdcOptions getKdcOptions() {
+        KdcOptions kdcOptions = new KdcOptions();
+        // By default enforce these flags
+        kdcOptions.setFlag(KdcOption.FORWARDABLE);
+        kdcOptions.setFlag(KdcOption.PROXIABLE);
+        kdcOptions.setFlag(KdcOption.RENEWABLE_OK);
+        return kdcOptions;
+    }
+
+    public HostAddresses getHostAddresses() {
+        List<HostAddress> hostAddresses = new ArrayList<HostAddress>();
+        HostAddresses addresses = null;
+        // No client host addresses are configured here, so this
+        // currently always returns null.
+        if (!hostAddresses.isEmpty()) {
+            addresses = new HostAddresses();
+            for (HostAddress ha : hostAddresses) {
+                addresses.addElement(ha);
+            }
+        }
+        return addresses;
+    }
+
+    public List<EncryptionType> getEncryptionTypes() {
+        List<EncryptionType> encryptionTypes = krbContext.getConfig().getEncryptionTypes();
+        return EncryptionUtil.orderEtypesByStrength(encryptionTypes);
+    }
+}
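
A hedged sketch of driving the handler above; hasServer, authToken and
passPhrase are assumed to be prepared by the caller (the token would
normally come from a validated JWT), and the error handling is
illustrative only:

    import org.apache.hadoop.has.server.HasServer;
    import org.apache.hadoop.has.server.kdc.HasKdcHandler;
    import org.apache.kerby.kerberos.kerb.type.base.AuthToken;
    import org.apache.kerby.kerberos.kerb.type.base.KrbError;
    import org.apache.kerby.kerberos.kerb.type.base.KrbMessage;

    public class HasKdcHandlerExample {
        static KrbMessage requestTgt(HasServer hasServer, AuthToken authToken,
                                     String passPhrase) {
            HasKdcHandler handler = new HasKdcHandler(hasServer);
            // Runs the AS exchange; returns an AS-REP or a KRB-ERROR.
            KrbMessage reply = handler.getResponse(authToken, passPhrase);
            if (reply instanceof KrbError) {
                System.err.println("AS exchange failed: "
                        + ((KrbError) reply).getEtext());
            }
            return reply;
        }
    }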

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
new file mode 100644
index 0000000..3f397fb
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLConfKey.java
@@ -0,0 +1,52 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.kdc;
+
+import org.apache.kerby.config.ConfigKey;
+
+/**
+ * Define all the MySQL backend related configuration items with default values.
+ */
+public enum MySQLConfKey implements ConfigKey {
+    MYSQL_DRIVER("com.mysql.jdbc.Driver"),
+    MYSQL_URL("jdbc:mysql://127.0.0.1:3306/mysqlbackend"),
+    MYSQL_USER("root"),
+    MYSQL_PASSWORD("passwd");
+
+    private Object defaultValue;
+
+    MySQLConfKey() {
+        this.defaultValue = null;
+    }
+
+    MySQLConfKey(Object defaultValue) {
+        this.defaultValue = defaultValue;
+    }
+
+    @Override
+    public String getPropertyKey() {
+        return name().toLowerCase();
+    }
+
+    @Override
+    public Object getDefaultValue() {
+        return this.defaultValue;
+    }
+}
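
Since getPropertyKey() is just the lower-cased enum name, the backend
reads these items from config entries of the following shape (the values
shown are the defaults above; the exact config file is whatever backend
config the KDC is pointed at):

    mysql_driver = com.mysql.jdbc.Driver
    mysql_url = jdbc:mysql://127.0.0.1:3306/mysqlbackend
    mysql_user = root
    mysql_password = passwd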

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
new file mode 100644
index 0000000..034704a
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/kdc/MySQLIdentityBackend.java
@@ -0,0 +1,426 @@
+/**
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ *
+ */
+package org.apache.hadoop.has.server.kdc;
+
+import org.apache.commons.dbutils.DbUtils;
+import org.apache.directory.api.util.GeneralizedTime;
+import org.apache.kerby.config.Config;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.identity.KrbIdentity;
+import org.apache.kerby.kerberos.kerb.identity.backend.AbstractIdentityBackend;
+import org.apache.kerby.kerberos.kerb.type.KerberosTime;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
+import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.ResultSet;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import javax.sql.rowset.serial.SerialBlob;
+import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;
+import java.text.ParseException;
+
+/**
+ * A MySQL based backend implementation.
+ */
+public class MySQLIdentityBackend extends AbstractIdentityBackend {
+    private Connection connection;
+    private String driver;
+    private String url;
+    private String user;
+    private String password;
+    private static final Logger LOG = LoggerFactory.getLogger(MySQLIdentityBackend.class);
+    private String identityTable;
+    private String keyInfoTable;
+
+    /**
+     * Construct an instance with the specified config, which contains
+     * everything needed to initialize the MySQL backend.
+     * @param config The config used to configure the backend
+     */
+    public MySQLIdentityBackend(final Config config) {
+        setConfig(config);
+    }
+
+    public MySQLIdentityBackend() { }
+
+    /**
+     * Start the MySQL connection.
+     */
+    private void startConnection() throws KrbException {
+        try {
+            Class.forName(driver);
+            connection = DriverManager.getConnection(url, user, password);
+            if (!connection.isClosed()) {
+                LOG.info("Succeeded in connecting to MySQL.");
+            }
+        } catch (ClassNotFoundException e) {
+            throw new KrbException("JDBC Driver Class not found. ", e);
+        } catch (SQLException e) {
+            throw new KrbException("Failed to connecting to MySQL. ", e);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doInitialize() throws KrbException {
+        LOG.info("Initializing the MySQL identity backend.");
+        driver = getConfig().getString(MySQLConfKey.MYSQL_DRIVER, true);
+        user = getConfig().getString(MySQLConfKey.MYSQL_USER, true);
+        password = getConfig().getString(MySQLConfKey.MYSQL_PASSWORD, true);
+
+        String urlString = getConfig().getString(MySQLConfKey.MYSQL_URL, true);
+        if (urlString == null || urlString.isEmpty()) {
+            urlString = getBackendConfig().getString(MySQLConfKey.MYSQL_URL, true);
+        }
+        url = urlString;
+
+        PreparedStatement preInitialize = null;
+        PreparedStatement preKdcRealm = null;
+        ResultSet resKdcRealm = null;
+        PreparedStatement preIdentity = null;
+        PreparedStatement preKey = null;
+        try {
+            startConnection();
+
+            // Set initialized for kdc config
+            String stmInitialize = "UPDATE `kdc_config` SET initialized = true WHERE id = 1";
+            preInitialize = connection.prepareStatement(stmInitialize);
+            preInitialize.executeUpdate();
+
+            // Get identity table name according to realm of kdc
+            String stmKdcRealm = "SELECT realm FROM `kdc_config`";
+            preKdcRealm = connection.prepareStatement(stmKdcRealm);
+            resKdcRealm = preKdcRealm.executeQuery();
+            if (resKdcRealm.next()) {
+                String realm = resKdcRealm.getString("realm").toLowerCase();
+                identityTable = "`" + realm + "_identity" + "`";
+                keyInfoTable = "`" + realm + "_key" + "`";
+            } else {
+                throw new KrbException("Failed to get kdc config.");
+            }
+
+            // Create identity table
+            String stmIdentity = "CREATE TABLE IF NOT EXISTS " + identityTable
+                + " (principal varchar(255) NOT NULL, key_version INTEGER "
+                + "DEFAULT 1, kdc_flags INTEGER DEFAULT 0, disabled bool "
+                + "DEFAULT NULL, locked bool DEFAULT NULL, expire_time "
+                + "VARCHAR(255) DEFAULT NULL, created_time VARCHAR(255) "
+                + "DEFAULT NULL, PRIMARY KEY (principal) ) ENGINE=INNODB;";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.executeUpdate();
+
+            // Create key table
+            String stmKey = "CREATE TABLE IF NOT EXISTS " + keyInfoTable
+                + " (key_id INTEGER NOT NULL AUTO_INCREMENT, key_type "
+                + "VARCHAR(255) DEFAULT NULL, kvno INTEGER DEFAULT -1, "
+                + "key_value BLOB DEFAULT NULL, principal VARCHAR(255) NOT NULL,"
+                + "PRIMARY KEY (key_id), INDEX (principal), FOREIGN KEY "
+                + "(principal) REFERENCES " + identityTable + "(principal) "
+                + ") ENGINE=INNODB;";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.executeUpdate();
+
+        } catch (SQLException e) {
+            LOG.error("Error occurred while initialize MySQL backend." + e.toString());
+            throw new KrbException("Failed to create table in database. ", e);
+        } finally {
+            DbUtils.closeQuietly(preInitialize);
+            DbUtils.closeQuietly(preKdcRealm);
+            DbUtils.closeQuietly(resKdcRealm);
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(preKey);
+            DbUtils.closeQuietly(connection);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doStop() throws KrbException {
+        try {
+            closeConnection();
+            if (connection.isClosed()) {
+                LOG.info("Succeeded in closing connection with MySQL.");
+            }
+        } catch (SQLException e) {
+            LOG.error("Failed to close connection with MySQL.");
+            throw new KrbException("Failed to close connection with MySQL. ", e);
+        }
+    }
+
+    /**
+     * Close the connection for stop().
+     * @throws SQLException if an error occurs while closing the connection
+     */
+    private void closeConnection() throws SQLException {
+        if (!connection.isClosed()) {
+            connection.close();
+        }
+    }
+
+    /**
+     * Convert a KerberosTime type object to a generalized time form of String.
+     * @param kerberosTime The kerberos time to convert
+     */
+    private String toGeneralizedTime(final KerberosTime kerberosTime) {
+        GeneralizedTime generalizedTime = new GeneralizedTime(kerberosTime.getValue());
+        return generalizedTime.toString();
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doAddIdentity(KrbIdentity identity) throws KrbException {
+        String principalName = identity.getPrincipalName();
+        int keyVersion = identity.getKeyVersion();
+        int kdcFlags = identity.getKdcFlags();
+        boolean disabled = identity.isDisabled();
+        boolean locked = identity.isLocked();
+        String createdTime = toGeneralizedTime(identity.getCreatedTime());
+        String expireTime = toGeneralizedTime(identity.getExpireTime());
+        Map<EncryptionType, EncryptionKey> keys = identity.getKeys();
+
+        PreparedStatement preIdentity = null;
+        PreparedStatement preKey = null;
+
+        KrbIdentity duplicateIdentity = doGetIdentity(principalName);
+        if (duplicateIdentity != null) {
+            LOG.warn("The identity maybe duplicate.");
+
+            return duplicateIdentity;
+        } else {
+            try {
+                startConnection();
+                connection.setAutoCommit(false);
+
+                // Insert identity to identity table
+                String stmIdentity = "insert into " + identityTable + " values(?, ?, ?, ?, ?, ?, ?)";
+                preIdentity = connection.prepareStatement(stmIdentity);
+                preIdentity.setString(1, principalName);
+                preIdentity.setInt(2, keyVersion);
+                preIdentity.setInt(3, kdcFlags);
+                preIdentity.setBoolean(4, disabled);
+                preIdentity.setBoolean(5, locked);
+                preIdentity.setString(6, createdTime);
+                preIdentity.setString(7, expireTime);
+                preIdentity.executeUpdate();
+
+                // Insert keys to key table
+                for (Map.Entry<EncryptionType, EncryptionKey> entry : keys.entrySet()) {
+                    String stmKey = "insert into " + keyInfoTable + " (key_type, kvno, key_value, principal)"
+                        + " values(?, ?, ?, ?)";
+                    preKey = connection.prepareStatement(stmKey);
+                    preKey.setString(1, entry.getKey().getName());
+                    preKey.setInt(2, entry.getValue().getKvno());
+                    preKey.setBlob(3, new SerialBlob(entry.getValue().getKeyData()));
+                    preKey.setString(4, principalName);
+                    preKey.executeUpdate();
+                }
+
+                connection.commit();
+                return identity;
+            } catch (SQLException e) {
+                try {
+                    LOG.info("Transaction is being rolled back.");
+                    connection.rollback();
+                } catch (SQLException ex) {
+                    throw new KrbException("Transaction roll back failed. ", ex);
+                }
+                LOG.error("Error occurred while adding identity.");
+                throw new KrbException("Failed to add identity. ", e);
+            } finally {
+                DbUtils.closeQuietly(preIdentity);
+                DbUtils.closeQuietly(preKey);
+                doStop();
+            }
+        }
+    }
+
+    /**
+     * Create kerberos time.
+     * @param generalizedTime generalized time
+     * @throws ParseException parse exception
+     */
+    private KerberosTime createKerberosTime(final String generalizedTime) throws ParseException {
+        long time = new GeneralizedTime(generalizedTime).getTime();
+        return new KerberosTime(time);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doGetIdentity(final String principalName) throws KrbException {
+        KrbIdentity krbIdentity = new KrbIdentity(principalName);
+
+        PreparedStatement preIdentity = null;
+        ResultSet resIdentity = null;
+        PreparedStatement preKey = null;
+        ResultSet resKey = null;
+        try {
+            startConnection();
+
+            // Get identity from identity table
+            String stmIdentity = "SELECT * FROM " + identityTable + " where principal = ?";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.setString(1, principalName);
+            resIdentity = preIdentity.executeQuery();
+
+            if (!resIdentity.next()) {
+                return null;
+            }
+
+            // The cursor already points at the single matching row
+            // (principal is the primary key); looping on next() again
+            // would skip it.
+            krbIdentity.setKeyVersion(resIdentity.getInt("key_version"));
+            krbIdentity.setKdcFlags(resIdentity.getInt("kdc_flags"));
+            krbIdentity.setDisabled(resIdentity.getBoolean("disabled"));
+            krbIdentity.setLocked(resIdentity.getBoolean("locked"));
+            krbIdentity.setCreatedTime(createKerberosTime(resIdentity.getString("created_time")));
+            krbIdentity.setExpireTime(createKerberosTime(resIdentity.getString("expire_time")));
+
+            // Get keys from key table
+            List<EncryptionKey> keys = new ArrayList<>();
+            String stmKey = "SELECT * FROM " + keyInfoTable + " where principal = ?";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.setString(1, principalName);
+            resKey = preKey.executeQuery();
+            while (resKey.next()) {
+                int kvno = resKey.getInt("kvno");
+                String keyType = resKey.getString("key_type");
+                EncryptionType eType = EncryptionType.fromName(keyType);
+                byte[] keyValue = resKey.getBytes("key_value");
+                EncryptionKey key = new EncryptionKey(eType, keyValue, kvno);
+                keys.add(key);
+            }
+
+            krbIdentity.addKeys(keys);
+            return krbIdentity;
+        } catch (SQLException e) {
+            LOG.error("Error occurred while getting identity.");
+            throw new KrbException("Failed to get identity. ", e);
+        } catch (ParseException e) {
+            throw new KrbException("Failed to get identity. ", e);
+        } finally {
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(resIdentity);
+            DbUtils.closeQuietly(preKey);
+            DbUtils.closeQuietly(resKey);
+            doStop();
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected KrbIdentity doUpdateIdentity(KrbIdentity identity) throws KrbException {
+        String principalName = identity.getPrincipalName();
+        try {
+            doDeleteIdentity(principalName); // Delete former identity
+            doAddIdentity(identity); // Insert new identity
+        } catch (KrbException e) {
+            LOG.error("Error occurred while updating identity: " + principalName);
+            throw new KrbException("Failed to update identity. ", e);
+        }
+
+        return getIdentity(principalName);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected void doDeleteIdentity(String principalName) throws KrbException {
+        PreparedStatement preKey = null;
+        PreparedStatement preIdentity = null;
+        try {
+            startConnection();
+            connection.setAutoCommit(false);
+
+            // Delete keys from key table
+            String stmKey = "DELETE FROM  " + keyInfoTable + " where principal = ?";
+            preKey = connection.prepareStatement(stmKey);
+            preKey.setString(1, principalName);
+            preKey.executeUpdate();
+
+            // Delete identity from identity table
+            String stmIdentity = "DELETE FROM " + identityTable + " where principal = ? ";
+            preIdentity = connection.prepareStatement(stmIdentity);
+            preIdentity.setString(1, principalName);
+            preIdentity.executeUpdate();
+
+            connection.commit();
+        } catch (SQLException e) {
+            try {
+                LOG.info("Transaction is being rolled back.");
+                connection.rollback();
+            } catch (SQLException ex) {
+                throw new KrbException("Transaction roll back failed. ", ex);
+            }
+            LOG.error("Error occurred while deleting identity.");
+            throw new KrbException("Failed to delete identity. ", e);
+        } finally {
+            DbUtils.closeQuietly(preIdentity);
+            DbUtils.closeQuietly(preKey);
+            doStop();
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    protected Iterable<String> doGetIdentities() throws KrbException {
+        List<String> identityNames = new ArrayList<>();
+        PreparedStatement preSmt = null;
+        ResultSet result = null;
+        try {
+            startConnection();
+            String statement = "SELECT * FROM " + identityTable;
+            preSmt = connection.prepareStatement(statement);
+            result = preSmt.executeQuery();
+            while (result.next()) {
+                identityNames.add(result.getString("principal"));
+            }
+        } catch (SQLException e) {
+            LOG.error("Error occurred while getting identities.");
+            throw new KrbException("Failed to get identities. ", e);
+        } finally {
+            DbUtils.closeQuietly(preSmt);
+            DbUtils.closeQuietly(result);
+            doStop();
+        }
+
+        return identityNames;
+    }
+}
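
A minimal wiring sketch for the backend above; setString(ConfigKey, ...)
is the same setter used elsewhere in this patch, the lifecycle calls
come from AbstractIdentityBackend, and the connection values are
placeholders:

    import org.apache.kerby.kerberos.kerb.KrbException;
    import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;

    public class MySQLBackendExample {
        public static void main(String[] args) throws KrbException {
            BackendConfig config = new BackendConfig();
            config.setString(MySQLConfKey.MYSQL_DRIVER, "com.mysql.jdbc.Driver");
            config.setString(MySQLConfKey.MYSQL_URL,
                    "jdbc:mysql://127.0.0.1:3306/mysqlbackend");
            config.setString(MySQLConfKey.MYSQL_USER, "root");
            config.setString(MySQLConfKey.MYSQL_PASSWORD, "passwd");

            MySQLIdentityBackend backend = new MySQLIdentityBackend(config);
            backend.initialize();   // invokes doInitialize() above
            for (String name : backend.getIdentities()) {
                System.out.println(name);
            }
            backend.stop();         // invokes doStop() above
        }
    }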

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
new file mode 100644
index 0000000..78ce1e9
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/ConfFilter.java
@@ -0,0 +1,54 @@
+package org.apache.hadoop.has.server.web;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import java.io.File;
+import java.io.IOException;
+
+@Private
+@Unstable
+public class ConfFilter implements Filter {
+    public static final Logger LOG = LoggerFactory.getLogger(ConfFilter.class);
+
+    @Override
+    public void init(FilterConfig filterConfig) throws ServletException {
+
+    }
+
+    @Override
+    public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse,
+                         FilterChain filterChain) throws IOException, ServletException {
+
+        final HasServer hasServer = WebServer.getHasServerFromContext(
+                servletRequest.getServletContext());
+        HasConfig hasConfig;
+        try {
+            hasConfig = HasUtil.getHasConfig(
+                    new File(hasServer.getConfDir(), "has-server.conf"));
+            String isEnableConf = hasConfig.getEnableConf();
+            if (!"true".equals(isEnableConf)) {
+                throw new RuntimeException("The KDC has already started and the config can no longer be changed.");
+            }
+            filterChain.doFilter(servletRequest, servletResponse);
+        } catch (HasException e) {
+            LOG.error(e.getMessage());
+            throw new ServletException(e);
+        }
+    }
+
+    @Override
+    public void destroy() {
+
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
new file mode 100644
index 0000000..82bb129
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/HostRoleType.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public enum HostRoleType {
+    HDFS("HDFS", new String[]{"HTTP", "hdfs"}),
+    YARN("YARN", new String[]{"yarn"}),
+    MAPRED("MAPRED", new String[]{"mapred"}),
+    HBASE("HBASE", new String[]{"hbase"}),
+    ZOOKEEPER("ZOOKEEPER", new String[]{"zookeeper"}),
+    SPARK("SPARK", new String[]{"spark"}),
+    HIVE("HIVE", new String[]{"hive"}),
+    OOZIE("OOZIE", new String[]{"oozie"}),
+    HUE("HUE", new String[]{"hue"});
+
+    private String name;
+    private String[] princs;
+
+    HostRoleType(String name, String[] princs) {
+        this.name = name;
+        this.princs = princs;
+    }
+
+    public String[] getPrincs() {
+        return princs;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+}
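
A small sketch of how the role table above expands into principal
names, mirroring addPrincByRole in LocalHasAdmin; the host and realm
values are placeholders:

    public class HostRoleTypeExample {
        public static void main(String[] args) {
            String host = "host1.example.com";
            String realm = "EXAMPLE.COM";
            for (String princ : HostRoleType.valueOf("HDFS").getPrincs()) {
                // Prints HTTP/host1.example.com@EXAMPLE.COM and
                // hdfs/host1.example.com@EXAMPLE.COM
                System.out.println(princ + "/" + host + "@" + realm);
            }
        }
    }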

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
new file mode 100644
index 0000000..bd0a1ca
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebConfigKey.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.has.server.web;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+
+/** 
+ * This class contains constants for configuration keys and default values
+ * used by the HAS web server.
+ */
+@InterfaceAudience.Private
+public class WebConfigKey {
+
+  public static final int HAS_HTTP_PORT_DEFAULT = 9870;
+  public static final String HAS_HTTP_HOST_DEFAULT = "0.0.0.0";
+  public static final String HAS_HTTP_ADDRESS_KEY = "has.http-address";
+  public static final String HAS_HTTP_ADDRESS_DEFAULT = HAS_HTTP_HOST_DEFAULT + ":" + HAS_HTTP_PORT_DEFAULT;
+
+  public static final String HAS_HTTPS_BIND_HOST_KEY = "has.https-bind-host";
+  public static final int HAS_HTTPS_PORT_DEFAULT = 9871;
+  public static final String HAS_HTTPS_HOST_DEFAULT = "0.0.0.0";
+  public static final String HAS_HTTPS_ADDRESS_KEY = "has.https-address";
+  public static final String HAS_HTTPS_ADDRESS_DEFAULT = HAS_HTTPS_HOST_DEFAULT + ":" + HAS_HTTPS_PORT_DEFAULT;
+  public static final String HAS_HTTP_POLICY_KEY = "has.http.policy";
+  public static final String HAS_HTTP_POLICY_DEFAULT = HttpConfig.Policy.HTTPS_ONLY.name();
+
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY = "has.https.server.keystore.resource";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT = "ssl-server.xml";
+  public static final String HAS_SERVER_HTTPS_KEYPASSWORD_KEY = "ssl.server.keystore.keypassword";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY = "ssl.server.keystore.password";
+  public static final String HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY = "ssl.server.keystore.location";
+  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY = "ssl.server.truststore.location";
+  public static final String HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY = "ssl.server.truststore.password";
+  public static final String HAS_CLIENT_HTTPS_NEED_AUTH_KEY = "has.client.https.need-auth";
+  public static final boolean HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT = false;
+
+  public static final String HAS_AUTHENTICATION_FILTER_KEY = "has.web.authentication.filter";
+  public static final String HAS_AUTHENTICATION_FILTER_DEFAULT = AuthenticationFilter.class.getName();
+
+  public static final String HAS_AUTHENTICATION_FILTER_AUTH_TYPE = "has.authentication.filter.auth.type";
+  public static final String HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY = "has.authentication.kerberos.principal";
+  public static final String HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY = "has.authentication.kerberos.keytab";
+  public static final String HAS_AUTHENTICATION_KERBEROS_NAME_RULES = "has.authentication.kerberos.name.rules";
+}
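
These keys are consumed by WebServer below; a short sketch of resolving
the HTTPS bind address (the conf file path is a placeholder, and the
loading via HasUtil.getHasConfig matches its use in ConfFilter above):

    import java.io.File;
    import java.net.InetSocketAddress;
    import org.apache.hadoop.has.common.HasConfig;
    import org.apache.hadoop.has.common.HasException;
    import org.apache.hadoop.has.common.util.HasUtil;
    import org.apache.hadoop.net.NetUtils;

    public class WebConfigExample {
        public static void main(String[] args) throws HasException {
            HasConfig conf = HasUtil.getHasConfig(
                    new File("/etc/has/has-server.conf"));
            String addr = conf.getString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
                    WebConfigKey.HAS_HTTPS_ADDRESS_DEFAULT);
            // e.g. 0.0.0.0:9871
            InetSocketAddress bind = NetUtils.createSocketAddr(addr);
            System.out.println(bind);
        }
    }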

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
new file mode 100644
index 0000000..3e5f832
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/WebServer.java
@@ -0,0 +1,348 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.has.server.web;
+
+import org.apache.hadoop.HadoopIllegalArgumentException;
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.hadoop.has.server.web.rest.HasApi;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.ServletContext;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+
+public class WebServer {
+    public static final Logger LOG = LoggerFactory.getLogger(WebServer.class);
+
+    private HttpServer2 httpServer;
+    private final HasConfig conf;
+
+    private InetSocketAddress httpAddress;
+    private InetSocketAddress httpsAddress;
+
+    protected static final String HAS_SERVER_ATTRIBUTE_KEY = "hasserver";
+
+    public WebServer(HasConfig conf) {
+        this.conf = conf;
+    }
+
+    public HasConfig getConf() {
+        return conf;
+    }
+
+    private void init() {
+
+        final String pathSpec = "/has/v1/*";
+
+        // register the HAS REST resource packages with Jersey
+        httpServer.addJerseyResourcePackage(HasApi.class
+                .getPackage().getName(),
+            pathSpec);
+    }
+
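+    /**
+     * Define the authentication filter over the admin endpoints
+     * (/has/v1/admin/*) when the configured auth type is kerberos.
+     */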
+    public void defineFilter() {
+        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
+        if ("kerberos".equals(authType)) {
+            // add authentication filter for the HAS admin endpoints
+            final String className = conf.getString(
+                WebConfigKey.HAS_AUTHENTICATION_FILTER_KEY,
+                WebConfigKey.HAS_AUTHENTICATION_FILTER_DEFAULT);
+
+            final String name = className;
+
+            Map<String, String> params = getAuthFilterParams(conf);
+
+            String adminPathSpec = "/has/v1/admin/*";
+            HttpServer2.defineFilter(httpServer.getWebAppContext(), name, className,
+                params, new String[]{adminPathSpec});
+            HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
+                + ")");
+        }
+    }
+
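+    /**
+     * Define the ConfFilter over the conf endpoints (/has/v1/conf/*),
+     * reusing the authentication filter parameters.
+     */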
+    public void defineConfFilter() {
+        String confFilterName = ConfFilter.class.getName();
+        String confPath = "/has/v1/conf/*";
+        HttpServer2.defineFilter(httpServer.getWebAppContext(), confFilterName, confFilterName,
+                getAuthFilterParams(conf), new String[]{confPath});
+        HttpServer2.LOG.info("Added filter '" + confFilterName + "' (class=" + confFilterName
+                + ")");
+    }
+
+    private Map<String, String> getAuthFilterParams(HasConfig conf) {
+        Map<String, String> params = new HashMap<String, String>();
+
+        String authType = conf.getString(WebConfigKey.HAS_AUTHENTICATION_FILTER_AUTH_TYPE);
+        if (authType != null && !authType.isEmpty()) {
+            params.put(AuthenticationFilter.AUTH_TYPE, authType);
+        }
+        String principal = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
+        if (principal != null && !principal.isEmpty()) {
+            try {
+                principal = SecurityUtil.getServerPrincipal(principal,
+                    getHttpsAddress().getHostName());
+            } catch (IOException e) {
+                LOG.warn("Errors occurred when get server principal. " + e.getMessage());
+            }
+            params.put(KerberosAuthenticationHandler.PRINCIPAL, principal);
+        }
+        String keytab = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
+        if (keytab != null && !keytab.isEmpty()) {
+            params.put(KerberosAuthenticationHandler.KEYTAB, keytab);
+        }
+        String rule = conf.getString(WebConfigKey.HAS_AUTHENTICATION_KERBEROS_NAME_RULES);
+        if (rule != null && !rule.isEmpty()) {
+            params.put(KerberosAuthenticationHandler.NAME_RULES, rule);
+        } else {
+            params.put(KerberosAuthenticationHandler.NAME_RULES, "DEFAULT");
+        }
+        return params;
+    }
+
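+    /**
+     * Return the bound address: the HTTP address when HTTP is enabled,
+     * otherwise the HTTPS address; null before start().
+     */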
+    public InetSocketAddress getBindAddress() {
+        if (httpAddress != null) {
+            return httpAddress;
+        } else if (httpsAddress != null) {
+            return httpsAddress;
+        } else {
+            return null;
+        }
+    }
+
+    /**
+     * Start the web server. Which HTTP and/or HTTPS endpoints are bound
+     * is decided by the configured HTTP policy.
+     */
+    public void start() throws HasException {
+
+        HttpConfig.Policy policy = getHttpPolicy(conf);
+
+        final String bindHost =
+            conf.getString(WebConfigKey.HAS_HTTPS_BIND_HOST_KEY);
+        InetSocketAddress httpAddr = null;
+        if (policy.isHttpEnabled()) {
+            final String httpAddrString = conf.getString(
+                WebConfigKey.HAS_HTTP_ADDRESS_KEY,
+                WebConfigKey.HAS_HTTP_ADDRESS_DEFAULT);
+            httpAddr = NetUtils.createSocketAddr(httpAddrString);
+            if (bindHost != null && !bindHost.isEmpty()) {
+                httpAddr = new InetSocketAddress(bindHost, httpAddr.getPort());
+            }
+            LOG.info("Get the http address: " + httpAddr);
+        }
+
+        InetSocketAddress httpsAddr = null;
+        if (policy.isHttpsEnabled()) {
+            final String httpsAddrString = conf.getString(
+                WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
+                WebConfigKey.HAS_HTTPS_ADDRESS_DEFAULT);
+            httpsAddr = NetUtils.createSocketAddr(httpsAddrString);
+
+            if (bindHost != null && !bindHost.isEmpty()) {
+                httpsAddr = new InetSocketAddress(bindHost, httpsAddr.getPort());
+            }
+            LOG.info("Get the https address: " + httpsAddr);
+        }
+
+        HttpServer2.Builder builder = httpServerTemplateForHAS(conf, httpAddr, httpsAddr, "has");
+
+        try {
+            httpServer = builder.build();
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when building http server. " + e.getMessage());
+        }
+
+        init();
+
+        try {
+            httpServer.start();
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when starting http server. " + e.getMessage());
+        }
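+        // The connector index follows the order the endpoints were added
+        // to the builder: HTTP first (when enabled), then HTTPS.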
+        int connIdx = 0;
+        if (policy.isHttpEnabled()) {
+            httpAddress = httpServer.getConnectorAddress(connIdx++);
+            conf.setString(WebConfigKey.HAS_HTTP_ADDRESS_KEY,
+                NetUtils.getHostPortString(httpAddress));
+        }
+
+        if (policy.isHttpsEnabled()) {
+            httpsAddress = httpServer.getConnectorAddress(connIdx);
+            conf.setString(WebConfigKey.HAS_HTTPS_ADDRESS_KEY,
+                NetUtils.getHostPortString(httpsAddress));
+        }
+    }
+
+    public void setWebServerAttribute(HasServer hasServer) {
+        httpServer.setAttribute(HAS_SERVER_ATTRIBUTE_KEY, hasServer);
+    }
+
+    public static HasServer getHasServerFromContext(ServletContext context) {
+        return (HasServer) context.getAttribute(HAS_SERVER_ATTRIBUTE_KEY);
+    }
+
+    /**
+     * Get http policy.
+     */
+    public HttpConfig.Policy getHttpPolicy(HasConfig conf) {
+        String policyStr = conf.getString(WebConfigKey.HAS_HTTP_POLICY_KEY,
+            WebConfigKey.HAS_HTTP_POLICY_DEFAULT);
+        HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
+        if (policy == null) {
+            throw new HadoopIllegalArgumentException("Unrecognized value '"
+                + policyStr + "' for " + WebConfigKey.HAS_HTTP_POLICY_KEY);
+        }
+
+        conf.setString(WebConfigKey.HAS_HTTP_POLICY_KEY, policy.name());
+        return policy;
+    }
+
+    /**
+     * Return an HttpServer2.Builder that the HAS server can use to
+     * initialize its HTTP / HTTPS server.
+     */
+    public HttpServer2.Builder httpServerTemplateForHAS(
+        HasConfig conf, final InetSocketAddress httpAddr, final InetSocketAddress httpsAddr,
+        String name) throws HasException {
+        HttpConfig.Policy policy = getHttpPolicy(conf);
+
+        HttpServer2.Builder builder = new HttpServer2.Builder().setName(name);
+
+        if (policy.isHttpEnabled()) {
+            if (httpAddr.getPort() == 0) {
+                builder.setFindPort(true);
+            }
+
+            URI uri = URI.create("http://" + NetUtils.getHostPortString(httpAddr));
+            builder.addEndpoint(uri);
+            LOG.info("Starting Web-server for " + name + " at: " + uri);
+        }
+
+        if (policy.isHttpsEnabled() && httpsAddr != null) {
+            HasConfig sslConf = loadSslConfiguration(conf);
+            loadSslConfToHttpServerBuilder(builder, sslConf);
+
+            if (httpsAddr.getPort() == 0) {
+                builder.setFindPort(true);
+            }
+
+            URI uri = URI.create("https://" + NetUtils.getHostPortString(httpsAddr));
+            builder.addEndpoint(uri);
+            LOG.info("Starting Web-server for " + name + " at: " + uri);
+        }
+
+        return builder;
+    }
+
+    /**
+     * Load HTTPS-related configuration.
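+     * The keystore resource is parsed as an INI-style file and is expected
+     * to provide the keystore/truststore location and password keys that
+     * are checked below.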
+     */
+    public HasConfig loadSslConfiguration(HasConfig conf) throws HasException {
+        HasConfig sslConf = new HasConfig();
+
+        String sslConfigString = conf.getString(
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT);
+        LOG.info("Get the ssl config file: " + sslConfigString);
+        try {
+            sslConf.addIniConfig(new File(sslConfigString));
+        } catch (IOException e) {
+            throw new HasException("Errors occurred when adding config. " + e.getMessage());
+        }
+
+        final String[] reqSslProps = {
+            WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_LOCATION_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_LOCATION_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY,
+            WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY
+        };
+
+        // Check if the required properties are included
+        for (String sslProp : reqSslProps) {
+            if (sslConf.getString(sslProp) == null) {
+                LOG.warn("SSL config " + sslProp + " is missing. If "
+                    + WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY
+                    + " is specified, make sure it is a relative path");
+            }
+        }
+
+        boolean requireClientAuth = conf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
+            WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
+        sslConf.setBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY, requireClientAuth);
+        return sslConf;
+    }
+
+    public HttpServer2.Builder loadSslConfToHttpServerBuilder(HttpServer2.Builder builder,
+                                                              HasConfig sslConf) {
+        return builder
+            .needsClientAuth(
+                sslConf.getBoolean(WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_KEY,
+                    WebConfigKey.HAS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
+            .keyPassword(getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYPASSWORD_KEY))
+            .keyStore(sslConf.getString("ssl.server.keystore.location"),
+                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY),
+                sslConf.getString("ssl.server.keystore.type", "jks"))
+            .trustStore(sslConf.getString("ssl.server.truststore.location"),
+                getPassword(sslConf, WebConfigKey.HAS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY),
+                sslConf.getString("ssl.server.truststore.type", "jks"))
+            .excludeCiphers(
+                sslConf.getString("ssl.server.exclude.cipher.list"));
+    }
+
+    /**
+     * Look up a password by alias. This currently reads the clear-text value
+     * straight from the config; it does not yet go through the
+     * CredentialProvider API the way Configuration.getPassword does.
+     *
+     * @param conf  Configuration instance
+     * @param alias name of the credential to retrieve
+     * @return String credential value or null
+     */
+    public String getPassword(HasConfig conf, String alias) {
+        return conf.getString(alias);
+    }
+
+    public void stop() throws Exception {
+        if (httpServer != null) {
+            httpServer.stop();
+        }
+    }
+
+    public InetSocketAddress getHttpAddress() {
+        return httpAddress;
+    }
+
+    public InetSocketAddress getHttpsAddress() {
+        return httpsAddress;
+    }
+}
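
For illustration, a hedged sketch of driving this class (the conf-file path is
an assumption; the call order matters because defineFilter() needs the web app
context that start() creates):

    HasConfig conf = HasUtil.getHasConfig(new File("/etc/has/has-server.conf"));
    WebServer webServer = new WebServer(conf);
    webServer.start();         // binds the HTTP/HTTPS endpoints per the policy
    webServer.defineFilter();  // installs the kerberos auth filter, if configured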

http://git-wip-us.apache.org/repos/asf/directory-kerby/blob/be580566/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
----------------------------------------------------------------------
diff --git a/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
new file mode 100644
index 0000000..a6fc4ce
--- /dev/null
+++ b/has/has-server/src/main/java/org/apache/hadoop/has/server/web/rest/ConfApi.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.has.server.web.rest;
+
+import org.apache.hadoop.has.common.HasConfig;
+import org.apache.hadoop.has.common.HasException;
+import org.apache.hadoop.has.common.util.HasUtil;
+import org.apache.hadoop.has.server.HasServer;
+import org.apache.hadoop.has.server.web.WebServer;
+import org.apache.kerby.kerberos.kerb.KrbException;
+import org.apache.kerby.kerberos.kerb.identity.backend.BackendConfig;
+import org.apache.kerby.kerberos.kerb.server.KdcUtil;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Implementation of the HAS configuration REST methods.
+ */
+@Path("/conf")
+public class ConfApi {
+
+    @Context
+    private ServletContext context;
+
+    @Context
+    private HttpServletRequest httpRequest;
+
+    /**
+     * Set HAS plugin.
+     *
+     * @param plugin HAS plugin name
+     * @return Response
+     */
+    @PUT
+    @Path("/setplugin")
+    @Consumes({MediaType.TEXT_PLAIN})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response setPlugin(@QueryParam("plugin") final String plugin) {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            WebServer.LOG.info("Set HAS plugin...");
+            try {
+                Map<String, String> values = new HashMap<>();
+                File hasConfFile = new File(hasServer.getConfDir(), "has-server.conf");
+                HasConfig hasConfig = HasUtil.getHasConfig(hasConfFile);
+                if (hasConfig != null) {
+                    // updateConfFile replaces occurrences of each map key with
+                    // its value, so map the current plugin name to the new one.
+                    String currentPlugin = hasConfig.getPluginName();
+                    values.put(currentPlugin, plugin);
+                } else {
+                    throw new RuntimeException("has-server.conf not found.");
+                }
+                hasServer.updateConfFile("has-server.conf", values);
+            } catch (IOException | HasException e) {
+                throw new RuntimeException("Failed to set HAS plugin. ", e);
+            }
+            WebServer.LOG.info("HAS plugin set successfully.");
+
+            return Response.status(200).entity("HAS plugin set successfully.\n").build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Configure the HAS server backend.
+     *
+     * @param backendType type of backend, either "json" or "mysql"
+     * @param dir         directory for the JSON backend
+     * @param driver      MySQL JDBC driver class
+     * @param url         MySQL JDBC connection URL
+     * @param user        MySQL user name
+     * @param password    MySQL password of the user
+     * @return Response
+     */
+    @PUT
+    @Path("/configkdcbackend")
+    @Consumes({MediaType.APPLICATION_JSON})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response configKdcBackend(
+        @QueryParam("backendType") final String backendType,
+        @QueryParam("dir") @DefaultValue("/tmp/has/jsonbackend") final String dir,
+        @QueryParam("driver") @DefaultValue("com.mysql.jdbc.Driver") final String driver,
+        @QueryParam("url") @DefaultValue("jdbc:mysql://127.0.0.1:3306/mysqlbackend") final String url,
+        @QueryParam("user") @DefaultValue("root") final String user,
+        @QueryParam("password") @DefaultValue("passwd") final String password) {
+
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            if ("json".equals(backendType)) {
+                WebServer.LOG.info("Set Json backend...");
+                try {
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_JAR_", "org.apache.kerby.kerberos.kdc.identitybackend.JsonIdentityBackend");
+                    values.put("#_JSON_DIR_", "backend.json.dir = " + dir);
+                    values.put("#_MYSQL_\n", "");
+                    hasServer.updateConfFile("backend.conf", values);
+                } catch (IOException | HasException e) {
+                    throw new RuntimeException("Failed to set Json backend. ", e);
+                }
+                WebServer.LOG.info("Json backend set successfully.");
+
+                return Response.status(200).entity("Json backend set successfully.\n").build();
+            } else if ("mysql".equals(backendType)) {
+                WebServer.LOG.info("Set MySQL backend...");
+                try {
+                    String mysqlConfig = "mysql_driver = " + driver + "\nmysql_url = " + url
+                        + "\nmysql_user = " + user + "\nmysql_password = " + password;
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_JAR_", "org.apache.hadoop.has.server.kdc.MySQLIdentityBackend");
+                    values.put("#_JSON_DIR_\n", "");
+                    values.put("#_MYSQL_", mysqlConfig);
+                    hasServer.updateConfFile("backend.conf", values);
+                } catch (IOException | HasException e) {
+                    throw new RuntimeException("Failed to set MySQL backend. ", e);
+                }
+                WebServer.LOG.info("MySQL backend set successfully.");
+
+                return Response.status(200).entity("MySQL backend set successfully.\n").build();
+            } else {
+                return Response.status(400).entity(backendType + " is not supported.\n").build();
+            }
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+
+    /**
+     * Configure the HAS server KDC.
+     *
+     * @param port  KDC port to set
+     * @param realm KDC realm to set
+     * @param host  KDC host to set
+     * @return Response
+     */
+    @PUT
+    @Path("/configkdc")
+    @Consumes({MediaType.TEXT_PLAIN})
+    @Produces({MediaType.TEXT_PLAIN})
+    public Response configKdc(
+        @QueryParam("port") final int port,
+        @QueryParam("realm") final String realm,
+        @QueryParam("host") final String host) {
+        if (httpRequest.isSecure()) {
+            final HasServer hasServer = WebServer.getHasServerFromContext(context);
+            WebServer.LOG.info("Config HAS server KDC...");
+            try {
+                BackendConfig backendConfig = KdcUtil.getBackendConfig(hasServer.getConfDir());
+                String backendJar = backendConfig.getString("kdc_identity_backend");
+                if ("org.apache.hadoop.has.server.kdc.MySQLIdentityBackend".equals(backendJar)) {
+                    hasServer.configMySQLKdc(backendConfig, realm, port, host, hasServer);
+                } else {
+                    Map<String, String> values = new HashMap<>();
+                    values.put("_HOST_", host);
+                    values.put("_PORT_", String.valueOf(port));
+                    values.put("_REALM_", realm);
+                    hasServer.updateConfFile("kdc.conf", values);
+                    String kdc = "\t\tkdc = " + host + ":" + port;
+                    values.put("_KDCS_", kdc);
+                    values.put("_UDP_LIMIT_", "4096");
+                    hasServer.updateConfFile("krb5.conf", values);
+                }
+            } catch (IOException | HasException | KrbException e) {
+                throw new RuntimeException("Failed to config HAS KDC. ", e);
+            }
+            WebServer.LOG.info("HAS server KDC set successfully.");
+            return Response.status(200).entity("HAS server KDC set successfully.\n").build();
+        }
+        return Response.status(403).entity("HTTPS required.\n").build();
+    }
+}
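
For illustration, a minimal client sketch for the /configkdc endpoint above
(the host, port, and realm are assumptions; the path and verb follow the @Path
and @PUT annotations; assumes java.net.URL and javax.net.ssl.HttpsURLConnection
imports and a JVM truststore that trusts the server certificate):

    URL url = new URL("https://has.example.com:9871/has/v1/conf/configkdc"
        + "?port=88&realm=EXAMPLE.COM&host=kdc.example.com");
    HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    System.out.println("HTTP " + conn.getResponseCode());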